Issue 4214 bulk export group observations missed when search parameter indexing enabled (#4241)

* Add failing test

* Add failing test for hsearch reference without type

* Modify cache to store forcedid type

* Disable failing test until we determine whether this is a bug and, if so, what the fix strategy should be

* Remove fixmes and commented lines

* Update test. Add changelog.

* Added test that passes on this branch.

* Added Encounters to test case.

Co-authored-by: juan.marchionatto <juan.marchionatto@smilecdr.com>
Co-authored-by: Tadgh <garygrantgraham@gmail.com>
Co-authored-by: kylejule <kyle.jule@smilecdr.com>
jmarchionatto 2022-11-11 15:08:41 -05:00 committed by GitHub
parent e5cf3ef396
commit 2df09c0cf3
9 changed files with 523 additions and 6 deletions

View File

@@ -0,0 +1,5 @@
---
type: fix
issue: 4214
title: "Before, When exporting a group including patients referenced in observations, observations were missed from output if `Search Parameters Indexing` was enabled.
This is now fixed."
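For context, the affected request is a Group-style bulk export that asks for Observation output alongside the Group and its member Patients. The following sketch reuses the BulkDataExportOptions calls that appear in the new test further down; the group id "G1" and the resource-type set are illustrative.

import java.util.Set;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.hl7.fhir.r4.model.IdType;

// Sketch of the affected request: a GROUP export that should include Observations
// belonging to the members of Group/G1. Values are illustrative.
class GroupExportOptionsSketch {
	static BulkDataExportOptions groupExportWithObservations() {
		BulkDataExportOptions options = new BulkDataExportOptions();
		options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
		options.setGroupId(new IdType("Group", "G1"));
		options.setResourceTypes(Set.of("Patient", "Observation", "Group"));
		options.setOutputFormat(Constants.CT_FHIR_NDJSON);
		return options; // before this fix, the Observation output was missing when advanced indexing was on
	}
}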

View File

@@ -421,7 +421,7 @@ public class IdHelperService implements IIdHelperService {
Optional<String> forcedId = translatePidIdToForcedIdWithCache(theId);
if (forcedId.isPresent()) {
- retVal.setValue(theResourceType + '/' + forcedId.get());
+ retVal.setValue(forcedId.get());
} else {
retVal.setValue(theResourceType + '/' + theId);
}
@@ -431,7 +431,7 @@ public class IdHelperService implements IIdHelperService {
@Override
public Optional<String> translatePidIdToForcedIdWithCache(ResourcePersistentId theId) {
- return myMemoryCacheService.get(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
+ return myMemoryCacheService.get(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(ForcedId::asTypedFhirResourceId));
}
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
@@ -609,7 +609,7 @@ public class IdHelperService implements IIdHelperService {
for (ForcedId forcedId : forcedIds) {
Long nextResourcePid = forcedId.getResourceId();
- Optional<String> nextForcedId = Optional.of(forcedId.getForcedId());
+ Optional<String> nextForcedId = Optional.of(forcedId.asTypedFhirResourceId());
retVal.put(nextResourcePid, nextForcedId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, nextForcedId);
}
@@ -642,7 +642,7 @@ public class IdHelperService implements IIdHelperService {
populateAssociatedResourceId(theResourceType, theForcedId, theResourcePersistentId);
}
- myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theResourcePersistentId.getIdAsLong(), Optional.of(theForcedId));
+ myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theResourcePersistentId.getIdAsLong(), Optional.of(theResourceType + "/" + theForcedId));
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId);
myMemoryCacheService.putAfterCommit(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theResourcePersistentId);
} else {
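The net effect of these IdHelperService changes is that the PID_TO_FORCED_ID cache now stores fully typed FHIR ids (for example "Patient/A1") rather than bare forced ids (for example "A1"), which is why the caller above no longer re-prepends theResourceType when the forced id is present. A self-contained sketch of the mapping, using a stand-in class rather than the real ForcedId entity:

import java.util.Optional;

// Stand-in for the ForcedId entity (not the real HAPI class): shows why the cache loader
// can switch from t -> t.getForcedId() to ForcedId::asTypedFhirResourceId.
class ForcedIdSketch {
	private final String myResourceType;
	private final String myForcedId;

	ForcedIdSketch(String theResourceType, String theForcedId) {
		myResourceType = theResourceType;
		myForcedId = theForcedId;
	}

	String getForcedId() {
		return myForcedId; // old cached value, e.g. "A1"
	}

	String asTypedFhirResourceId() {
		return myResourceType + "/" + myForcedId; // new cached value, e.g. "Patient/A1"
	}

	public static void main(String[] args) {
		Optional<ForcedIdSketch> row = Optional.of(new ForcedIdSketch("Patient", "A1"));
		System.out.println(row.map(ForcedIdSketch::getForcedId).orElseThrow());           // A1
		System.out.println(row.map(ForcedIdSketch::asTypedFhirResourceId).orElseThrow()); // Patient/A1
	}
}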

View File

@@ -0,0 +1,107 @@
package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
import ca.uhn.fhir.jpa.batch.models.Batch2JobStartResponse;
import ca.uhn.fhir.jpa.test.BaseJpaTest;
import ca.uhn.fhir.jpa.test.config.TestHSearchAddInConfig;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.jpa.util.BulkExportUtils;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.util.JsonUtil;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Meta;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.util.ResourceUtils;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Set;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
TestR4Config.class
// pick up elastic or lucene engine:
,TestHSearchAddInConfig.LuceneFilesystem.class
})
public class BulkGroupExportWithIndexedSearchParametersTest extends BaseJpaTest {
private final FhirContext myCtx = FhirContext.forR4Cached();
@Autowired private DaoConfig myDaoConfig;
@Autowired private IBatch2JobRunner myJobRunner;
@Autowired private PlatformTransactionManager myTxManager;
@Autowired
@Qualifier("mySystemDaoR4")
protected IFhirSystemDao<Bundle, Meta> mySystemDao;
@BeforeEach
void setUp() {
myDaoConfig.setAdvancedHSearchIndexing(true);
}
@Test
public void groupBulkExportWithIndexedSearchParametersTest() throws Exception {
// Create Group and associated resources from json input
File jsonInputUrl = ResourceUtils.getFile(ResourceUtils.CLASSPATH_URL_PREFIX + "bulk-group-export/bundle-group-upload.json");
String jsonBundle = Files.readString(Paths.get(jsonInputUrl.toURI()), StandardCharsets.UTF_8);
Bundle inputBundle = myFhirContext.newJsonParser().parseResource(Bundle.class, jsonBundle);
mySystemDao.transaction(mySrd, inputBundle);
// set the export options
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Set.of("Patient", "Observation", "Group"));
options.setGroupId(new IdType("Group", "G1"));
options.setExportStyle(BulkDataExportOptions.ExportStyle.GROUP);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
BulkExportJobResults jobResults = getBulkExportJobResults(options);
assertThat(jobResults.getResourceTypeToBinaryIds().keySet(), containsInAnyOrder("Patient", "Observation", "Group"));
}
private BulkExportJobResults getBulkExportJobResults(BulkDataExportOptions theOptions) {
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(theOptions));
assertNotNull(startResponse);
// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getJobId());
await().until(() -> myJobRunner.getJobInfo(startResponse.getJobId()).getReport() != null);
// Iterate over the files
String report = myJobRunner.getJobInfo(startResponse.getJobId()).getReport();
return JsonUtil.deserialize(report, BulkExportJobResults.class);
}
@Override
protected FhirContext getFhirContext() { return myCtx; }
@Override
protected PlatformTransactionManager getTxManager() { return myTxManager; }
}

View File

@@ -2274,6 +2274,37 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest impl
}
}
@Disabled("while finding out strategy for this fix")
@Nested
public class ReferenceParameter {
@BeforeEach
public void setup() {
myDaoConfig.setAdvancedHSearchIndexing(true);
}
@AfterEach
public void tearDown() {
DaoConfig defaultConfig = new DaoConfig();
myDaoConfig.setAdvancedHSearchIndexing(defaultConfig.isAdvancedHSearchIndexing());
}
@Test
public void observationSubjectReferenceTest() {
Patient patient = new Patient();
DaoMethodOutcome outcome = myPatientDao.create(patient, mySrd);
IIdType patId = outcome.getId();
IIdType obsId = myTestDataBuilder.createObservation(List.of(myTestDataBuilder.withSubject(patId.toString())));
myCaptureQueriesListener.clear();
assertFindId("when exact", obsId, "/Observation?subject=" + patId.getVersionIdPartAsLong());
assertEquals(myDaoConfig.isAdvancedHSearchIndexing() ? 1 : 2,
myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "we build the bundle with no sql");
}
}
@Nested
class CompositeSearch extends CompositeSearchParameterTestCases {
CompositeSearch() {

View File

@@ -0,0 +1,290 @@
{
"resourceType": "Bundle",
"id": "bundle-transaction",
"meta": {
"lastUpdated": "2021-04-19T20:24:48.194+00:00"
},
"type": "transaction",
"entry": [
{
"fullUrl": "http://example.org/fhir/Patient/A1",
"resource": {
"resourceType": "Patient",
"id": "A1",
"identifier": [
{
"use": "usual",
"type": {
"coding": [
{
"system": "http://terminology.hl7.org/CodeSystem/v2-0203",
"code": "MR"
}
]
},
"system": "urn:oid:1.2.36.146.595.217.0.1",
"value": "12345"
}
],
"name": [
{
"family": "updated new name",
"given": [
"Amy"
]
}
],
"gender": "male",
"birthDate": "1977-05-24"
},
"request": {
"method": "PUT",
"url": "Patient/A1"
}
},
{
"fullUrl": "http://example.org/fhir/Patient/A2",
"resource": {
"resourceType": "Patient",
"id": "A2",
"meta": {
"versionId": "1",
"lastUpdated": "2021-11-03T15:18:41.352-04:00",
"source": "#HrhETo5Ilp5ppTIn"
},
"name": [
{
"text": "Wilma Flintstone",
"family": "Flintstone",
"given": [
"Wilma",
"Wendy"
],
"prefix": [
"Mrs."
]
}
],
"telecom": [
{
"system": "phone",
"value": "555-675-1235",
"use": "home"
}
],
"gender": "female",
"birthDate": "1961-05-20",
"address": [
{
"line": [
"14 TH AVENUE"
],
"city": "Bedrock",
"state": "MA",
"country": "US"
}
]
},
"request": {
"method": "PUT",
"url": "Patient/A2"
}
},
{
"fullUrl": "http://example.org/fhir/Patient/A3",
"resource": {
"resourceType": "Patient",
"id": "A3",
"meta": {
"versionId": "1",
"lastUpdated": "2021-11-03T15:18:41.352-04:00",
"source": "#HrhETo5Ilp5ppTIn"
},
"name": [
{
"text": "Wilma Flintstone",
"family": "Flintstone",
"given": [
"Wilma",
"Wendy"
],
"prefix": [
"Mrs."
]
}
],
"telecom": [
{
"system": "phone",
"value": "555-675-1235",
"use": "home"
}
],
"gender": "female",
"birthDate": "1961-05-20",
"address": [
{
"line": [
"14 TH AVENUE"
],
"city": "Bedrock",
"state": "MA",
"country": "US"
}
]
},
"request": {
"method": "PUT",
"url": "Patient/A3"
}
},
{
"fullUrl": "http://example.org/fhir/Group/G1",
"resource": {
"resourceType": "Group",
"id": "G1",
"text": {
"status": "additional"
},
"type": "person",
"actual": true,
"member": [
{
"entity": {
"reference": "Patient/A1"
},
"period": {
"start": "2021-01-01"
}
},
{
"entity": {
"reference": "Patient/A2"
},
"period": {
"start": "2021-01-01"
}
}
]
},
"request": {
"method": "PUT",
"url": "Group/G1"
}
},
{
"fullUrl": "http://example.org/fhir/Group/G2",
"resource": {
"resourceType": "Group",
"id": "G2",
"text": {
"status": "additional"
},
"type": "person",
"actual": true,
"member": [
{
"entity": {
"reference": "Patient/A1"
},
"period": {
"start": "2021-01-01"
}
},
{
"entity": {
"reference": "Patient/A3"
},
"period": {
"start": "2021-01-01"
}
}
]
},
"request": {
"method": "PUT",
"url": "Group/G2"
}
},
{
"fullUrl": "http://example.org/fhir/Observation/O1",
"resource": {
"resourceType": "Observation",
"id": "O1",
"meta": {
"versionId": "1",
"lastUpdated": "2021-11-03T15:09:32.240-04:00",
"source": "#QZNuzoWSI2wh1Fyy"
},
"status": "registered",
"code": {
"coding": [
{
"system": "http://loinc.org",
"code": "3141-9",
"display": "Body weight Measured"
}
]
},
"subject": {
"reference": "Patient/A1",
"identifier": {
"system": "http://hospital_abc/MRN",
"value": "01374547601"
}
},
"effectiveDateTime": "2020-01-05T00:00:00Z",
"valueQuantity": {
"value": 3.0,
"unit": "kg",
"system": "http://unitsofmeasure.org",
"code": "kg"
}
},
"request": {
"method": "PUT",
"url": "Observation/O1"
}
},
{
"fullUrl": "http://example.org/fhir/Observation/O2",
"resource": {
"resourceType": "Observation",
"id": "O2",
"meta": {
"versionId": "1",
"lastUpdated": "2021-11-03T15:09:32.240-04:00",
"source": "#QZNuzoWSI2wh1Fyy"
},
"status": "registered",
"code": {
"coding": [
{
"system": "http://loinc.org",
"code": "3141-9",
"display": "Body weight Measured"
}
]
},
"subject": {
"reference": "Patient/A2",
"identifier": {
"system": "http://hospital_abc/MRN",
"value": "01374547123"
}
},
"effectiveDateTime": "2020-01-05T00:00:00Z",
"valueQuantity": {
"value": 30.0,
"unit": "kg",
"system": "http://unitsofmeasure.org",
"code": "kg"
}
},
"request": {
"method": "PUT",
"url": "Observation/O2"
}
}
]
}

View File

@@ -75,7 +75,6 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
}
public void setTransientForcedId(String theTransientForcedId) {
- assert !defaultString(theTransientForcedId).contains("/") : "Forced ID should not include type: " + theTransientForcedId;
myTransientForcedId = theTransientForcedId;
}
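The assertion rejecting any value containing "/" is removed here, presumably because typed values such as "Patient/A1" can now legitimately reach this setter once the cache stores typed ids. An illustrative stand-in (not the real BaseHasResource entity):

// Stand-in only: shows the two value shapes the setter may now receive.
class TransientForcedIdSketch {
	private String myTransientForcedId;

	void setTransientForcedId(String theTransientForcedId) {
		// the old "/"-rejecting assertion is intentionally absent
		myTransientForcedId = theTransientForcedId;
	}

	public static void main(String[] args) {
		TransientForcedIdSketch sketch = new TransientForcedIdSketch();
		sketch.setTransientForcedId("A1");         // bare forced id (previously the only allowed form)
		sketch.setTransientForcedId("Patient/A1"); // typed form (possible after this change)
		System.out.println(sketch.myTransientForcedId); // Patient/A1
	}
}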

View File

@@ -135,4 +135,8 @@ public class ForcedId extends BasePartitionable {
b.append("resourcePid", myResourcePid);
return b.toString();
}
+ public String asTypedFhirResourceId() {
+ 	return getResourceType() + "/" + getForcedId();
+ }
}
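One way to picture why the typed form matters for group export: group member references and Observation.subject.reference always carry the resource type, so typed ids resolved from the cache can be compared against them directly. This is illustrative only, not the actual export-expansion code:

import java.util.Set;

// Illustrative only: matching a reference string against ids resolved from the cache.
class GroupMembershipMatchSketch {
	public static void main(String[] args) {
		Set<String> memberIds = Set.of("Patient/A1", "Patient/A2"); // typed ids, as now cached
		String observationSubject = "Patient/A1";                   // Observation.subject.reference

		System.out.println(memberIds.contains(observationSubject));          // true
		// With the old bare forced ids the same lookup silently misses:
		System.out.println(Set.of("A1", "A2").contains(observationSubject)); // false
	}
}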

View File

@@ -597,6 +597,87 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
}
}
@Test
public void testGroupExport_includesObservationsAndEncountersOfPatientsInExportedGroup_whenLuceneIndexingEnabled() {
// Enable Lucene indexing
myDaoConfig.setAllowContainsSearches(true);
myDaoConfig.setAdvancedHSearchIndexing(true);
Patient patient = new Patient();
patient.setId("A1");
patient.setActive(true);
myClient.update().resource(patient).execute();
Patient patient2 = new Patient();
patient2.setId("A2");
patient2.setActive(true);
myClient.update().resource(patient2).execute();
Patient patient3 = new Patient();
patient3.setId("A3");
patient3.setActive(true);
myClient.update().resource(patient3).execute();
Group group1 = new Group();
group1.setActual(true);
group1.setType(Group.GroupType.PERSON);
group1.setId("G1");
group1.setActive(true);
group1.addMember().getEntity().setReference("Patient/A1");
group1.addMember().getEntity().setReference("Patient/A2");
myClient.update().resource(group1).execute();
Group group2 = new Group();
group2.setActual(true);
group2.setType(Group.GroupType.PERSON);
group2.setId("G2");
group2.setActive(true);
group2.addMember().getEntity().setReference("Patient/A1");
group2.addMember().getEntity().setReference("Patient/A3");
myClient.update().resource(group2).execute();
Observation o = new Observation();
o.setSubject(new Reference("Patient/A1"));
o.setId("obs-a1");
myClient.update().resource(o).execute();
Observation o2 = new Observation();
o2.setSubject(new Reference("Patient/A2"));
o2.setId("obs-a2");
myClient.update().resource(o2).execute();
Encounter e = new Encounter();
e.setSubject(new Reference("Patient/A1"));
e.setId("enc-a1");
myClient.update().resource(e).execute();
Encounter e2 = new Encounter();
e2.setSubject(new Reference("Patient/A2"));
e2.setId("enc-a2");
myClient.update().resource(e2).execute();
HashSet<String> resourceTypes = Sets.newHashSet("Observation", "Patient", "Encounter", "Group");
BulkExportJobResults bulkExportJobResults = startGroupBulkExportJobAndAwaitCompletion(resourceTypes, new HashSet<>(), "G1");
Map<String, List<IBaseResource>> typeToResources = convertJobResultsToResources(bulkExportJobResults);
assertThat(typeToResources.get("Patient"), hasSize(2));
assertThat(typeToResources.get("Group"), hasSize(1));
assertThat(typeToResources.get("Observation"), hasSize(2));
assertThat(typeToResources.get("Encounter"), hasSize(2));
Map<String, String> typeToContents = convertJobResultsToStringContents(bulkExportJobResults);
assertThat(typeToContents.get("Patient"), containsString("A1"));
assertThat(typeToContents.get("Patient"), containsString("A2"));
assertThat(typeToContents.get("Observation"), containsString("obs-a1"));
assertThat(typeToContents.get("Observation"), containsString("obs-a2"));
assertThat(typeToContents.get("Encounter"), containsString("enc-a1"));
assertThat(typeToContents.get("Encounter"), containsString("enc-a2"));
}
@Test
public void testGroupExportPatientAndOtherResources() {

View File

@@ -173,7 +173,7 @@ public class SearchParamValidatingInterceptor {
private boolean isNewSearchParam(RuntimeSearchParam theSearchParam, Set<String> theExistingIds) {
return theExistingIds
.stream()
- .noneMatch(resId -> resId.equals(theSearchParam.getId().getIdPart()));
+ .noneMatch(resId -> resId.substring(resId.indexOf("/")+1).equals(theSearchParam.getId().getIdPart()));
}
private void validateStandardSpOnUpdate(RequestDetails theRequestDetails, RuntimeSearchParam runtimeSearchParam, SearchParameterMap searchParameterMap) {
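Because resolved ids can now come back in typed form, the duplicate check above strips everything up to and including the "/" before comparing against the candidate's bare id part. A minimal sketch of that comparison with illustrative values:

import java.util.Set;

// Minimal sketch of the comparison in isNewSearchParam: existing ids may be typed
// ("SearchParameter/sp-1") or bare ("sp-2"); both forms are handled because indexOf
// returns -1 for bare ids, so substring(0) leaves the value unchanged.
class NewSearchParamCheckSketch {
	static boolean isNewSearchParam(String theCandidateIdPart, Set<String> theExistingIds) {
		return theExistingIds.stream()
			.noneMatch(resId -> resId.substring(resId.indexOf('/') + 1).equals(theCandidateIdPart));
	}

	public static void main(String[] args) {
		Set<String> existing = Set.of("SearchParameter/sp-1", "sp-2");
		System.out.println(isNewSearchParam("sp-1", existing)); // false: already present
		System.out.println(isNewSearchParam("sp-2", existing)); // false: bare id also matches
		System.out.println(isNewSearchParam("sp-9", existing)); // true: genuinely new
	}
}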