Fix all broken tests

Tadgh 2021-02-25 19:11:18 -05:00
parent 6c6b959cb5
commit 604b5d2060
5 changed files with 25 additions and 16 deletions

View File

@@ -48,7 +48,11 @@ public class BulkExportGenerateResourceFilesStepListener implements StepExecutionListener {
 	@Override
 	public ExitStatus afterStep(StepExecution theStepExecution) {
 		if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
-			String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString("jobUUID");
+			//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
+			String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+			if (jobUuid == null) {
+				jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+			}
 			assert isNotBlank(jobUuid);
 			String exitDescription = theStepExecution.getExitStatus().getExitDescription();
 			myBulkExportDaoSvc.setJobToStatus(jobUuid, BulkJobStatusEnum.ERROR, exitDescription);
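
A note on the change above: the listener now resolves the job UUID from the job parameters first and, failing that, from the job-level ExecutionContext, so it works both when a caller launches the job with an existing UUID and when CreateBulkExportEntityTasklet (changed further down) creates the job and stores the UUID in the context. A minimal sketch of the same lookup pattern in isolation, assuming standard Spring Batch types; the helper class and the key value "jobUUID" (inferred from the literal this commit replaces) are illustrative, not part of the codebase:

import org.springframework.batch.core.StepExecution;

final class JobUuidResolver {
	// Assumed value of BulkExportJobConfig.JOB_UUID_PARAMETER, based on the old "jobUUID" literal.
	private static final String JOB_UUID_KEY = "jobUUID";

	static String resolve(StepExecution theStepExecution) {
		// Prefer the value passed in as a job parameter (caller supplied an existing job).
		String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(JOB_UUID_KEY);
		if (jobUuid == null) {
			// Otherwise read the value an earlier step wrote into the job ExecutionContext.
			jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(JOB_UUID_KEY);
		}
		return jobUuid;
	}
}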

View File

@@ -55,13 +55,13 @@ public class BulkItemReader implements ItemReader<List<ResourcePersistentId>> {
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
 	Iterator<ResourcePersistentId> myPidIterator;
 	@Value("#{stepExecutionContext['resourceType']}")
 	private String myResourceType;
 	@Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}")
 	private Long myReadChunkSize;
-	@Value("#{jobParameters['"+ BulkExportJobConfig.JOB_UUID_PARAMETER+"']}")
+	@Value("#{jobExecutionContext['"+ BulkExportJobConfig.JOB_UUID_PARAMETER+"']}")
 	private String myJobUUID;
 	@Value("#{stepExecutionContext['resourceType']}")
 	private String myResourceType;
 	@Autowired
 	private IBulkExportJobDao myBulkExportJobDao;
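
The switch from jobParameters[...] to jobExecutionContext[...] above only resolves because the reader is a late-bound bean: Spring Batch exposes the jobExecutionContext SpEL variable to beans declared with @StepScope (or @JobScope), binding the value when the step actually runs. A minimal sketch of that wiring, not the HAPI FHIR configuration; the bean names and the "jobUUID" key are assumptions:

import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class ExampleReaderConfig {

	// The @Value expression is evaluated when the step starts, after an earlier
	// step has put "jobUUID" into the job-level ExecutionContext.
	@Bean
	@StepScope
	ExampleReader exampleReader(@Value("#{jobExecutionContext['jobUUID']}") String theJobUuid) {
		return new ExampleReader(theJobUuid);
	}

	static class ExampleReader {
		private final String myJobUuid;

		ExampleReader(String theJobUuid) {
			myJobUuid = theJobUuid;
		}
	}
}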

View File

@@ -46,8 +46,8 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
 		Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
 		//We can leave early if they provided us with an existing job.
-		if (jobParameters.containsKey("jobUUID")) {
-			addUUIDToJobContext(theChunkContext, (String)jobParameters.get("jobUUID"));
+		if (jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER)) {
+			addUUIDToJobContext(theChunkContext, (String)jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
 			return RepeatStatus.FINISHED;
 		} else {
 			String resourceTypes = (String)jobParameters.get("resourceTypes");
@@ -79,6 +79,6 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
 			.getStepExecution()
 			.getJobExecution()
 			.getExecutionContext()
-			.putString("jobUUID", theJobUUID);
+			.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUUID);
 	}
 }
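
The second hunk above is the other half of the contract: the tasklet walks from the chunk context to the step execution to the job execution and puts the new job's UUID into the job-level ExecutionContext, which is exactly where the readers' jobExecutionContext[...] expressions look it up. An alternative, more declarative way to surface a step-local value at the job level is Spring Batch's ExecutionContextPromotionListener; a sketch, with the "jobUUID" key assumed rather than taken from BulkExportJobConfig:

import org.springframework.batch.core.listener.ExecutionContextPromotionListener;

class PromotionListenerSketch {
	// Copies the listed keys from the step ExecutionContext to the job ExecutionContext
	// when the step finishes, instead of writing to the job context directly.
	static ExecutionContextPromotionListener jobUuidPromotionListener() {
		ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
		listener.setKeys(new String[]{"jobUUID"});
		return listener;
	}
}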

View File

@@ -63,10 +63,9 @@ public class GroupBulkItemReader implements ItemReader<List<ResourcePersistentId>> {
 	@Value("#{stepExecutionContext['resourceType']}")
 	private String myResourceType;
 	@Value("#{jobParameters['" + BulkExportJobConfig.GROUP_ID_PARAMETER + "']}")
 	private String myGroupId;
-	@Value("#{jobParameters['"+ BulkExportJobConfig.JOB_UUID_PARAMETER+"']}")
+	@Value("#{jobExecutionContext['"+ BulkExportJobConfig.JOB_UUID_PARAMETER+"']}")
 	private String myJobUUID;
 	@Value("#{jobParameters['" + BulkExportJobConfig.READ_CHUNK_PARAMETER + "']}")
 	private Long myReadChunkSize;
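
After this change the group reader pulls its inputs from two different places: the group id and read chunk size stay in jobParameters (values supplied when the job is launched), while the job UUID comes from jobExecutionContext (written by the tasklet once the job entity exists). A sketch of the launch-time side using only the standard Spring Batch builder; the key strings and values are illustrative stand-ins for the BulkExportJobConfig constants:

import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;

class GroupExportLaunchSketch {
	static JobParameters buildParameters() {
		return new JobParametersBuilder()
			.addString("groupId", "Group/G0")   // assumed key for GROUP_ID_PARAMETER
			.addLong("readChunkSize", 10L)      // assumed key for READ_CHUNK_PARAMETER
			.toJobParameters();
	}
}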

View File

@@ -334,7 +334,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 		// Fetch the job again
 		status = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
 		assertEquals(BulkJobStatusEnum.COMPLETE, status.getStatus());
-		assertEquals(2, status.getFiles().size());
+		assertEquals(5, status.getFiles().size());
 		// Iterate over the files
 		for (IBulkDataExportSvc.FileEntry next : status.getFiles()) {
@@ -342,17 +342,24 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 			assertEquals(Constants.CT_FHIR_NDJSON, nextBinary.getContentType());
 			String nextContents = new String(nextBinary.getContent(), Constants.CHARSET_UTF8);
 			ourLog.info("Next contents for type {}:\n{}", next.getResourceType(), nextContents);
 			if ("Patient".equals(next.getResourceType())) {
 				assertThat(nextContents, containsString("\"value\":\"PAT0\""));
 				assertEquals(10, nextContents.split("\n").length);
 			} else if ("Observation".equals(next.getResourceType())) {
 				assertThat(nextContents, containsString("\"subject\":{\"reference\":\"Patient/PAT0\"}}\n"));
 				assertEquals(10, nextContents.split("\n").length);
 			} else if ("Immunization".equals(next.getResourceType())) {
 				assertThat(nextContents, containsString("\"patient\":{\"reference\":\"Patient/PAT0\"}}\n"));
 				assertEquals(10, nextContents.split("\n").length);
+			} else if ("CareTeam".equals(next.getResourceType())) {
+				assertThat(nextContents, containsString("\"id\":\"CT0\""));
+				assertEquals(10, nextContents.split("\n").length);
+			} else if ("Group".equals(next.getResourceType())) {
+				assertThat(nextContents, containsString("\"id\":\"G0\""));
+				assertEquals(1, nextContents.split("\n").length);
 			} else {
-				fail(next.getResourceType());
+				fail();
 			}
 		}
 	}
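
The expected file count of 5 in the previous hunk follows directly from this one: the export now produces one NDJSON file per resource type, and the five branches cover Patient, Observation, Immunization, CareTeam and Group. The per-file line counts (10 versus 1) come from createResources() below, which creates ten of each patient-linked resource but a single Group. A small sketch of the counting idiom the assertions rely on, assuming NDJSON content with one resource per line:

class NdjsonLineCountSketch {
	static int count(String theNdjsonContents) {
		// Each exported resource is one line; split on '\n' to count them.
		// String.split drops trailing empty strings, so a trailing newline does not inflate the count.
		return theNdjsonContents.split("\n").length;
	}

	public static void main(String[] args) {
		String twoPatients = "{\"resourceType\":\"Patient\",\"id\":\"PAT0\"}\n"
			+ "{\"resourceType\":\"Patient\",\"id\":\"PAT1\"}\n";
		System.out.println(count(twoPatients)); // prints 2
	}
}
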
@@ -607,6 +614,7 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 	private void createResources() {
 		Group group = new Group();
+		group.setId("G0");
 		for (int i = 0; i < 10; i++) {
 			Patient patient = new Patient();
 			patient.setId("PAT" + i);
@@ -642,12 +650,10 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
 			CareTeam careTeam = new CareTeam();
 			careTeam.setId("CT" + i);
-			// FIXME GGG note if you uncomment addPartipant and comment out setSubject the test passes
-			// careTeam.addParticipant().setMember(new Reference(patId));
 			careTeam.setSubject(new Reference(patId)); // This maps to the "patient" search parameter on CareTeam
 			myCareTeamDao.update(careTeam);
 		}
-		myPatientGroupId = myGroupDao.create(group).getId();
+		myPatientGroupId = myGroupDao.update(group).getId();
 	}
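
One last note on the hunk above: switching myGroupDao.create(group) to update(group) goes with the newly assigned id "G0". In HAPI FHIR's JPA DAOs, update() on a resource that carries a client-assigned id stores it under that id (creating it if it does not yet exist), which is what the Group branch of the test asserts when it looks for "id":"G0"; create() is meant for server-assigned ids. A minimal sketch of the client-assigned-id pattern, reconstructed from the lines shown here rather than the full test file, with the group member purely illustrative:

import org.hl7.fhir.r4.model.Group;
import org.hl7.fhir.r4.model.Reference;

class GroupSetupSketch {
	static Group buildGroup() {
		Group group = new Group();
		group.setId("G0"); // client-assigned id, as in createResources()
		group.addMember().setEntity(new Reference("Patient/PAT0")); // illustrative member
		// In the test this is then stored with: myPatientGroupId = myGroupDao.update(group).getId();
		return group;
	}
}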