From b9155721a75d27ae88e084999ed9c810eb5342fc Mon Sep 17 00:00:00 2001
From: Ken Stevens
Date: Sat, 19 Aug 2023 20:37:16 -0400
Subject: [PATCH] fix batch2 npe when no results returned (#5219)

* fix npe error

* spotless

* spotless
---
 .../fhir/changelog/7_0_0/5219-batch-npe.yaml  |  4 ++
 .../batch2/jobs/step/ResourceIdListStep.java  | 37 ++++++++++---------
 .../jobs/step/ResourceIdListStepTest.java     | 29 +++++++++------
 3 files changed, 41 insertions(+), 29 deletions(-)
 create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5219-batch-npe.yaml

diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5219-batch-npe.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5219-batch-npe.yaml
new file mode 100644
index 00000000000..c67be3b55b6
--- /dev/null
+++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/7_0_0/5219-batch-npe.yaml
@@ -0,0 +1,4 @@
+---
+type: fix
+issue: 5219
+title: "Reindex batch job threw an exception when no results matched the reindex request. This has been corrected."
diff --git a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStep.java b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStep.java
index c5b4f289ebc..172951253c4 100644
--- a/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStep.java
+++ b/hapi-fhir-storage-batch2/src/main/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStep.java
@@ -90,25 +90,26 @@ public class ResourceIdListStep
 
 		if (nextChunk.isEmpty()) {
 			ourLog.info("No data returned");
+		} else {
+			ourLog.debug("Found {} IDs from {} to {}", nextChunk.size(), start, nextChunk.getLastDate());
+
+			final Set<TypedPidJson> idBuffer = nextChunk.getTypedResourcePids().stream()
+					.map(TypedPidJson::new)
+					.collect(Collectors.toCollection(LinkedHashSet::new));
+
+			final UnmodifiableIterator<List<TypedPidJson>> partition =
+					Iterators.partition(idBuffer.iterator(), maxBatchId);
+
+			while (partition.hasNext()) {
+				final List<TypedPidJson> submissionIds = partition.next();
+
+				totalIdsFound += submissionIds.size();
+				chunkCount++;
+				submitWorkChunk(submissionIds, nextChunk.getRequestPartitionId(), theDataSink);
+			}
+
+			ourLog.info("Submitted {} chunks with {} resource IDs", chunkCount, totalIdsFound);
 		}
-
-		ourLog.debug("Found {} IDs from {} to {}", nextChunk.size(), start, nextChunk.getLastDate());
-
-		final Set<TypedPidJson> idBuffer = nextChunk.getTypedResourcePids().stream()
-				.map(TypedPidJson::new)
-				.collect(Collectors.toCollection(LinkedHashSet::new));
-
-		final UnmodifiableIterator<List<TypedPidJson>> partition = Iterators.partition(idBuffer.iterator(), maxBatchId);
-
-		while (partition.hasNext()) {
-			final List<TypedPidJson> submissionIds = partition.next();
-
-			totalIdsFound += submissionIds.size();
-			chunkCount++;
-			submitWorkChunk(submissionIds, nextChunk.getRequestPartitionId(), theDataSink);
-		}
-
-		ourLog.info("Submitted {} chunks with {} resource IDs", chunkCount, totalIdsFound);
 		return RunOutcome.SUCCESS;
 	}
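Review note on ResourceIdListStep: the fix guards the id-page processing with an isEmpty() check, so a reindex request that matches nothing now submits no work chunks instead of dereferencing values that only exist for non-empty pages; the non-empty path still de-duplicates the ids and splits them into batches of at most MAX_BATCH_OF_IDS via Guava's Iterators.partition(). What follows is a minimal standalone sketch of that pattern, assuming Guava on the classpath and plain String ids; processPage() and submitWorkChunk() are placeholder names for this sketch, not the real ResourceIdListStep API, and the batch size 500 is an assumed value.

// Standalone sketch only (Guava assumed on the classpath); "processPage" and
// "submitWorkChunk" are placeholders, not the real HAPI ResourceIdListStep API.
import com.google.common.collect.Iterators;
import com.google.common.collect.UnmodifiableIterator;

import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class EmptyPageGuardSketch {

	private static final int MAX_BATCH_OF_IDS = 500; // assumed value, for illustration only

	public static void main(String[] args) {
		processPage(Collections.emptyList()); // the previously failing "no results" case
		processPage(List.of("Patient/1", "Patient/2", "Patient/3"));
	}

	static void processPage(List<String> thePageOfIds) {
		// The essence of the fix: when nothing matched, submit nothing instead of
		// touching values (last date, partition id) that only exist for non-empty pages.
		if (thePageOfIds.isEmpty()) {
			System.out.println("No data returned, nothing to submit");
			return;
		}

		// De-duplicate while preserving order, then split into chunks of at most
		// MAX_BATCH_OF_IDS ids, mirroring the Iterators.partition() usage in the patch.
		final Set<String> idBuffer = new LinkedHashSet<>(thePageOfIds);
		final UnmodifiableIterator<List<String>> partition =
				Iterators.partition(idBuffer.iterator(), MAX_BATCH_OF_IDS);

		int chunkCount = 0;
		int totalIdsFound = 0;
		while (partition.hasNext()) {
			final List<String> submissionIds = partition.next();
			totalIdsFound += submissionIds.size();
			chunkCount++;
			submitWorkChunk(submissionIds);
		}
		System.out.printf("Submitted %d chunks with %d resource IDs%n", chunkCount, totalIdsFound);
	}

	private static void submitWorkChunk(List<String> theIds) {
		System.out.println("work chunk of " + theIds.size() + " ids: " + theIds);
	}
}

Collecting into a LinkedHashSet keeps the submission order stable while dropping duplicate ids, which is why the patch builds the id buffer before partitioning.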
diff --git a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStepTest.java b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStepTest.java
index 1fb51f4cc8d..3670bfa2bc5 100644
--- a/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStepTest.java
+++ b/hapi-fhir-storage-batch2/src/test/java/ca/uhn/fhir/batch2/jobs/step/ResourceIdListStepTest.java
@@ -56,25 +56,30 @@ class ResourceIdListStepTest {
 	}
 
 	@ParameterizedTest
-	@ValueSource(ints = {1, 100, 500, 501, 2345, 10500})
+	@ValueSource(ints = {0, 1, 100, 500, 501, 2345, 10500})
 	void testResourceIdListBatchSizeLimit(int theListSize) {
 		List<TypedResourcePid> idList = generateIdList(theListSize);
 
 		when(myStepExecutionDetails.getData()).thenReturn(myData);
 		when(myParameters.getBatchSize()).thenReturn(theListSize);
 		when(myStepExecutionDetails.getParameters()).thenReturn(myParameters);
 		HomogeneousResourcePidList homogeneousResourcePidList = mock(HomogeneousResourcePidList.class);
-		when(homogeneousResourcePidList.getTypedResourcePids()).thenReturn(idList);
-		when(homogeneousResourcePidList.getLastDate()).thenReturn(new Date());
+		if (theListSize > 0) {
+			when(homogeneousResourcePidList.getTypedResourcePids()).thenReturn(idList);
+			when(homogeneousResourcePidList.getLastDate()).thenReturn(new Date());
+			when(homogeneousResourcePidList.isEmpty()).thenReturn(false);
+			// Ensure none of the work chunks exceed MAX_BATCH_OF_IDS in size:
+			doAnswer(i -> {
+				ResourceIdListWorkChunkJson list = i.getArgument(0);
+				Assertions.assertTrue(list.size() <= ResourceIdListStep.MAX_BATCH_OF_IDS,
+						"Id batch size should never exceed " + ResourceIdListStep.MAX_BATCH_OF_IDS);
+				return null;
+			}).when(myDataSink).accept(any(ResourceIdListWorkChunkJson.class));
+		} else {
+			when(homogeneousResourcePidList.isEmpty()).thenReturn(true);
+		}
 		when(myIdChunkProducer.fetchResourceIdsPage(any(), any(), any(), any(), any()))
 				.thenReturn(homogeneousResourcePidList);
-		// Ensure none of the work chunks exceed MAX_BATCH_OF_IDS in size:
-		doAnswer(i -> {
-			ResourceIdListWorkChunkJson list = i.getArgument(0);
-			Assertions.assertTrue(list.size() <= ResourceIdListStep.MAX_BATCH_OF_IDS,
-					"Id batch size should never exceed " + ResourceIdListStep.MAX_BATCH_OF_IDS);
-			return null;
-		}).when(myDataSink).accept(any(ResourceIdListWorkChunkJson.class));
 
 		final RunOutcome run = myResourceIdListStep.run(myStepExecutionDetails, myDataSink);
 		assertNotEquals(null, run);
@@ -93,7 +98,9 @@ class ResourceIdListStepTest {
 		// The very last chunk should be whatever is left over (if there is a remainder):
 		int expectedLastBatchSize = theListSize % ResourceIdListStep.MAX_BATCH_OF_IDS;
 		expectedLastBatchSize = (expectedLastBatchSize == 0) ? ResourceIdListStep.MAX_BATCH_OF_IDS : expectedLastBatchSize;
-		assertEquals(expectedLastBatchSize, allDataChunks.get(allDataChunks.size() - 1).size());
+		if (!allDataChunks.isEmpty()) {
+			assertEquals(expectedLastBatchSize, allDataChunks.get(allDataChunks.size() - 1).size());
+		}
 	}
 
 	private List<TypedResourcePid> generateIdList(int theListSize) {
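Review note on ResourceIdListStepTest: the parameterized test now includes 0 in the @ValueSource, stubs isEmpty() to match each case, and only checks the size of the last chunk when at least one chunk was produced. The sketch below re-derives the same chunk-count and last-chunk-size arithmetic with plain JUnit 5 and Guava (both assumed on the classpath, no HAPI mocks); MAX_BATCH_OF_IDS = 500 is an assumed stand-in for ResourceIdListStep.MAX_BATCH_OF_IDS.

// Illustration only: JUnit 5 + Guava assumed; MAX_BATCH_OF_IDS = 500 is an assumed
// stand-in for ResourceIdListStep.MAX_BATCH_OF_IDS.
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.google.common.collect.Lists;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

class ChunkSizeMathSketchTest {

	private static final int MAX_BATCH_OF_IDS = 500;

	@ParameterizedTest
	@ValueSource(ints = {0, 1, 100, 500, 501, 2345, 10500})
	void chunkSizesRespectTheCap(int theListSize) {
		List<Integer> ids = IntStream.range(0, theListSize).boxed().collect(Collectors.toList());
		// Lists.partition() yields zero chunks for an empty list, which is exactly the
		// situation the guarded assertion in the updated test has to tolerate.
		List<List<Integer>> chunks = Lists.partition(ids, MAX_BATCH_OF_IDS);

		// Every chunk respects the cap, and the chunk count is the ceiling of size/cap.
		chunks.forEach(c -> assertTrue(c.size() <= MAX_BATCH_OF_IDS));
		assertEquals((theListSize + MAX_BATCH_OF_IDS - 1) / MAX_BATCH_OF_IDS, chunks.size());

		// Only the non-empty case has a "last chunk" to check, mirroring the
		// !allDataChunks.isEmpty() guard added in the patch.
		if (!chunks.isEmpty()) {
			int remainder = theListSize % MAX_BATCH_OF_IDS;
			int expectedLastBatchSize = (remainder == 0) ? MAX_BATCH_OF_IDS : remainder;
			assertEquals(expectedLastBatchSize, chunks.get(chunks.size() - 1).size());
		}
	}
}

For theListSize = 0 the partition list is empty, so the last-chunk assertion is simply skipped; that is the same reason the patch wraps the final assertEquals in an !allDataChunks.isEmpty() check rather than asserting unconditionally.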