Fix tests

This commit is contained in:
Martha 2024-06-27 18:41:50 -07:00
parent 58b23992df
commit 318ff95baf
6 changed files with 7 additions and 8 deletions

View File

@@ -142,7 +142,7 @@ public class ConsumeFilesStepR4Test extends BasePartitioningR4Test {
// Validate
if (partitionEnabled) {
-			assertEquals(7, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
+			assertEquals(8, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
} else {
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
}

View File

@@ -2883,7 +2883,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
createPatient(withPartition(1), withActiveTrue());
myCaptureQueriesListener.clear();
-		addReadDefaultPartition();
Bundle outcome = mySystemDao.transaction(mySrd, input.get());
ourLog.debug("Resp: {}", myFhirContext.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome));
myCaptureQueriesListener.logSelectQueriesForCurrentThread();

View File

@@ -73,6 +73,7 @@ public class ResourceIdListWorkChunkJson implements IModelJson {
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
.append("ids", myTypedPids)
+			.append("requestPartitionId", myRequestPartitionId)
.toString();
}

View File

@@ -65,7 +65,7 @@ public class GenerateRangeChunksStepTest {
Arguments.of(List.of("Observation?", "Patient?"), threePartitions, false, 6),
Arguments.of(List.of("Observation?", "Patient?", "Practitioner?"), threePartitions, true, 3),
Arguments.of(List.of("Observation?status=final", "Patient?"), partition1, false, 2),
-			Arguments.of(List.of("Observation?status=final"), threePartitions, false, 2)
+			Arguments.of(List.of("Observation?status=final"), threePartitions, false, 3)
);
}

View File

@@ -7,7 +7,6 @@ import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
import ca.uhn.fhir.batch2.jobs.parameters.JobParameters;
import ca.uhn.fhir.batch2.model.JobInstance;
import ca.uhn.fhir.batch2.model.WorkChunk;
-import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.pid.HomogeneousResourcePidList;
import ca.uhn.fhir.jpa.api.pid.IResourcePidList;
import ca.uhn.fhir.jpa.api.pid.IResourcePidStream;
@@ -62,7 +61,7 @@ public class LoadIdsStepTest {
@Test
public void testGenerateSteps() {
JobParameters parameters = new JobParameters();
-		ChunkRangeJson range = new ChunkRangeJson(DATE_1, DATE_END).setPartitionId(RequestPartitionId.allPartitions());
+		ChunkRangeJson range = new ChunkRangeJson(DATE_1, DATE_END);
String instanceId = "instance-id";
JobInstance jobInstance = JobInstance.fromInstanceId(instanceId);
String chunkId = "chunk-id";
@@ -70,8 +69,7 @@ public class LoadIdsStepTest {
// First Execution
-		when(myBatch2DaoSvc.fetchResourceIdStream(eq(DATE_1), eq(DATE_END), isNull(), isNull()))
-				.thenReturn(createIdChunk());
+		when(myBatch2DaoSvc.fetchResourceIdStream(eq(DATE_1), eq(DATE_END), isNull(), isNull())).thenReturn(createIdChunk());
mySvc.run(details, mySink);

View File

@@ -88,7 +88,8 @@ class ResourceIdListStepTest {
assertThat(allDataChunks).hasSize(expectedBatchCount);
// Ensure that all chunks except the very last one are MAX_BATCH_OF_IDS in length
-		for (ResourceIdListWorkChunkJson dataChunk : allDataChunks) {
+		for (int i = 0; i < expectedBatchCount - 1; i++) {
+			ResourceIdListWorkChunkJson dataChunk = allDataChunks.get(i);
assertEquals(ResourceIdListStep.MAX_BATCH_OF_IDS, dataChunk.size());
assertEquals(partitionId, dataChunk.getRequestPartitionId());
}