Set batch export job results count to the number of resources processed instead of 0. (#4172)

* Add count to batch export admin page results and add a new unit test.

* Fix failing unit test in WriteBinaryStepTest.

* Add changelog.

* Small changelog fix.
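
For orientation, the pattern this commit applies is sketched below: a batch2 step worker returns a RunOutcome constructed from the number of records it actually handled, rather than the shared RunOutcome.SUCCESS constant, whose record count is zero. Only the RunOutcome(int) constructor, the SUCCESS constant, and getRecordsProcessed() are taken from the diffs below; the import path, class, and method names in this sketch are illustrative assumptions, not the project's actual step implementation.

    import ca.uhn.fhir.batch2.api.RunOutcome; // package assumed for illustration

    import java.util.List;

    public class ProcessedCountSketch {

        // Hypothetical chunk writer: after writing the resources, report how many
        // were handled so the job's combined record count reflects the real total.
        static RunOutcome writeChunk(List<String> theStringifiedResources) {
            int numResourcesProcessed = theStringifiedResources.size();
            // ... write the stringified resources to a Binary here ...
            // Before the fix: return RunOutcome.SUCCESS; // reports 0 records processed
            return new RunOutcome(numResourcesProcessed);
        }

        public static void main(String[] args) {
            RunOutcome outcome = writeChunk(List.of("{\"resourceType\":\"Patient\"}"));
            System.out.println("Records processed: " + outcome.getRecordsProcessed()); // 1
        }
    }
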
Luke deGruchy 2022-10-20 14:27:01 -04:00 committed by GitHub
parent 0c5b64cce7
commit c8312662d7
4 changed files with 52 additions and 5 deletions

View File

@@ -0,0 +1,4 @@
---
type: fix
issue: 4173
title: "When triggering a batch export, the number of resources processed is returned as zero. This is now fixed to return the total number of resources processed across a single or multiple bundles."

View File

@@ -1,5 +1,7 @@
package ca.uhn.fhir.jpa.bulk;
import ca.uhn.fhir.batch2.api.IJobPersistence;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.BulkExportJobResults;
import ca.uhn.fhir.jpa.api.svc.IBatch2JobRunner;
@@ -44,6 +46,8 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.is;
@@ -56,6 +60,7 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(BulkExportUseCaseTest.class);
@@ -63,6 +68,9 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
@Autowired
private IBatch2JobRunner myJobRunner;
@Autowired
private IJobPersistence myJobPersistence;
@Nested
public class SpecConformanceTests {
@Test
@@ -171,6 +179,40 @@
}
}
@Test
public void testResourceCountIsCorrect() {
int patientCount = 5;
for (int i = 0; i < patientCount; i++) {
Patient patient = new Patient();
patient.setId("pat-" + i);
myPatientDao.update(patient);
}
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(Collections.singleton("Patient"));
options.setFilters(Collections.emptySet());
options.setExportStyle(BulkDataExportOptions.ExportStyle.SYSTEM);
options.setOutputFormat(Constants.CT_FHIR_NDJSON);
Batch2JobStartResponse startResponse = myJobRunner.startNewJob(BulkExportUtils.createBulkExportJobParametersFromExportOptions(options));
assertNotNull(startResponse);
final String jobId = startResponse.getJobId();
// Run a scheduled pass to build the export
myBatch2JobHelper.awaitJobCompletion(startResponse.getJobId());
final Optional<JobInstance> optJobInstance = myJobPersistence.fetchInstance(jobId);
assertNotNull(optJobInstance);
assertTrue(optJobInstance.isPresent());
final JobInstance jobInstance = optJobInstance.get();
assertEquals(patientCount, jobInstance.getCombinedRecordsProcessed());
}
private void logContentTypeAndResponse(Header[] headers, String response) {
ourLog.info("**************************");
ourLog.info("Content-Type is: {}", headers[0]);
@@ -705,11 +747,11 @@ public class BulkExportUseCaseTest extends BaseResourceProviderR4Test {
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.GROUP, theResourceTypes, theFilters, theGroupId);
}
BulkExportJobResults startSystemBulkExportJobAndAwaitCompletion(HashSet<String> theResourceTypes, HashSet<String> theFilters) {
BulkExportJobResults startSystemBulkExportJobAndAwaitCompletion(Set<String> theResourceTypes, Set<String> theFilters) {
return startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle.SYSTEM, theResourceTypes, theFilters, null);
}
BulkExportJobResults startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle theExportStyle, HashSet<String> theResourceTypes, HashSet<String> theFilters, String theGroupOrPatientId) {
BulkExportJobResults startBulkExportJobAndAwaitCompletion(BulkDataExportOptions.ExportStyle theExportStyle, Set<String> theResourceTypes, Set<String> theFilters, String theGroupOrPatientId) {
BulkDataExportOptions options = new BulkDataExportOptions();
options.setResourceTypes(theResourceTypes);
options.setFilters(theFilters);

View File

@@ -64,9 +64,10 @@ public class WriteBinaryStep implements IJobStepWorker<BulkExportJobParameters,
@Nonnull IJobDataSink<BulkExportBinaryFileId> theDataSink) throws JobExecutionFailedException {
BulkExportExpandedResources expandedResources = theStepExecutionDetails.getData();
final int numResourcesProcessed = expandedResources.getStringifiedResources().size();
ourLog.info("Write binary step of Job Export");
ourLog.info("Writing {} resources to binary file", expandedResources.getStringifiedResources().size());
ourLog.info("Writing {} resources to binary file", numResourcesProcessed);
@SuppressWarnings("unchecked")
IFhirResourceDao<IBaseBinary> binaryDao = myDaoRegistry.getResourceDao("Binary");
@@ -113,7 +114,7 @@
processedRecordsCount,
expandedResources.getResourceType());
return RunOutcome.SUCCESS;
return new RunOutcome(numResourcesProcessed);
}
/**

View File

@@ -137,7 +137,7 @@ public class WriteBinaryStepTest {
RunOutcome outcome = myFinalStep.run(input, sink);
// verify
assertEquals(RunOutcome.SUCCESS, outcome);
assertEquals(new RunOutcome(stringified.size()).getRecordsProcessed(), outcome.getRecordsProcessed());
ArgumentCaptor<IBaseBinary> binaryCaptor = ArgumentCaptor.forClass(IBaseBinary.class);
verify(binaryDao)