Add constants, fix up some tests

parent 02f38e9532 · commit 7b663bfe99

BatchJobsConfig.java

@@ -28,7 +28,9 @@ import org.springframework.context.annotation.Import;
 //When you define a new batch job, add it here.
 @Import({
    CommonBatchJobConfig.class,
-   BulkExportJobConfig.class,})
+   BulkExportJobConfig.class
+})
 public class BatchJobsConfig {
-   //Empty config, as this is just an aggregator for all the various batch jobs defined around the system.
+   public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
+   public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
 }
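
Note (commentary, not part of the diff): the rest of this commit swaps hard-coded job-name literals for these two constants. A constant works in both places that need the name because an @Qualifier value must be a compile-time constant, and the value matches the bean name, which for a @Bean factory method defaults to the method name. A minimal, hypothetical consumer to illustrate the pattern:

import org.springframework.batch.core.Job;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

import ca.uhn.fhir.jpa.batch.BatchJobsConfig;

// Illustrative class, not from this commit.
@Component
public class SomeBulkExportConsumer {

   // Resolves the Job bean named "bulkExportJob" declared in BulkExportJobConfig.
   @Autowired
   @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
   private Job myBulkExportJob;
}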

BulkExportJobConfig.java

@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.job;
  * #L%
  */

+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.processors.PidToIBaseResourceProcessor;
 import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
 import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
@@ -46,6 +47,7 @@ import java.util.List;
  */
 @Configuration
 public class BulkExportJobConfig {

    public static final String JOB_UUID_PARAMETER = "jobUUID";
    public static final String READ_CHUNK_PARAMETER = "readChunkSize";
+   public static final String EXPAND_MDM_PARAMETER = "expandMdm";
@@ -70,7 +72,7 @@ public class BulkExportJobConfig {
    @Bean
    @Lazy
    public Job bulkExportJob() {
-      return myJobBuilderFactory.get("bulkExportJob")
+      return myJobBuilderFactory.get(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
         .validator(bulkJobParameterValidator())
         .start(createBulkExportEntityStep())
         .next(partitionStep())
@@ -81,7 +83,7 @@ public class BulkExportJobConfig {
    @Bean
    @Lazy
    public Job groupBulkExportJob() {
-      return myJobBuilderFactory.get("groupBulkExportJob")
+      return myJobBuilderFactory.get(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
         .validator(groupBulkJobParameterValidator())
         .validator(bulkJobParameterValidator())
         .start(createBulkExportEntityStep())
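
Side note (not in the diff): the name handed to JobBuilderFactory.get() is the Spring Batch job name, which is what the JobExplorer lookups in the tests below key on, so reusing the constant keeps the job definition and the test-side lookup in sync. Sketch, assuming an autowired JobExplorer:

// Mirrors the lookup awaitAllBulkJobCompletions() performs further down.
List<JobInstance> instances =
   myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100);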

BulkDataExportProvider.java

@@ -189,6 +189,10 @@ public class BulkDataExportProvider {
       @IdParam IIdType theIdParam,
       @OperationParam(name = JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theOutputFormat,
       @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theType,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_SINCE, min = 0, max = 1, typeName = "instant") IPrimitiveType<Date> theSince,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_TYPE_FILTER, min = 0, max = 1, typeName = "string") IPrimitiveType<String> theTypeFilter,
+      @OperationParam(name = JpaConstants.PARAM_EXPORT_MDM, min = 0, max = 1, typeName = "boolean") IPrimitiveType<Boolean> theMdm,
+
       ServletRequestDetails theRequestDetails
    ) {
@@ -207,10 +211,20 @@ public class BulkDataExportProvider {

       //TODO GGG eventually, we will support these things.
       Set<String> filters = null;
-      Date since = null;
-      boolean theMdm = false;

-      IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(outputFormat, resourceTypes, since, filters, theIdParam, theMdm));
+      Date since = null;
+      if (theSince != null) {
+         since = theSince.getValue();
+      }
+
+      boolean mdm = false;
+      if (theMdm != null) {
+         mdm = theMdm.getValue();
+      }
+      if (theTypeFilter != null) {
+         filters = ArrayUtil.commaSeparatedListToCleanSet(theTypeFilter.getValueAsString());
+      }
+      IBulkDataExportSvc.JobInfo outcome = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(outputFormat, resourceTypes, since, filters, theIdParam, mdm));

       String serverBase = getServerBase(theRequestDetails);
       String pollLocation = serverBase + "/" + JpaConstants.OPERATION_EXPORT_POLL_STATUS + "?" + JpaConstants.PARAM_EXPORT_POLL_STATUS_JOB_ID + "=" + outcome.getJobId();
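
For context, the new _typeFilter handling leans on ArrayUtil.commaSeparatedListToCleanSet(). A rough sketch of the expected behaviour, using the same filter string as the provider test below; the import path and the exact splitting behaviour are assumptions, not taken from this diff:

import java.util.Set;

import ca.uhn.fhir.util.ArrayUtil; // assumed location of the utility

public class TypeFilterParseExample {
   public static void main(String[] args) {
      Set<String> filters = ArrayUtil.commaSeparatedListToCleanSet(
         "Observation?code=OBSCODE,DiagnosticReport?code=DRCODE");
      // Expected: two entries, "Observation?code=OBSCODE" and "DiagnosticReport?code=DRCODE"
      System.out.println(filters);
   }
}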

BulkDataExportSvcImpl.java

@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions;
@@ -105,11 +106,11 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
    private IBatchJobSubmitter myJobSubmitter;

    @Autowired
-   @Qualifier("bulkExportJob")
+   @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
    private org.springframework.batch.core.Job myBulkExportJob;

    @Autowired
-   @Qualifier("groupBulkExportJob")
+   @Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
    private org.springframework.batch.core.Job myGroupBulkExportJob;

    private final int myRetentionPeriod = (int) (2 * DateUtils.MILLIS_PER_HOUR);
@@ -139,8 +140,9 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

       String jobUuid = bulkExportJobEntity.getJobId();
       String theGroupId = getGroupIdIfPresent(bulkExportJobEntity.getRequest());
+      String theMdmExpand = getMdmIfPresent(bulkExportJobEntity.getRequest());
       try {
-         processJob(jobUuid, theGroupId);
+         processJob(jobUuid, theGroupId, theMdmExpand);
       } catch (Exception e) {
          ourLog.error("Failure while preparing bulk export extract", e);
          myTxTemplate.execute(t -> {
@@ -166,6 +168,16 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
       }
       return null;
    }
+   private String getMdmIfPresent(String theRequestString) {
+      Map<String, String[]> stringMap = UrlUtil.parseQueryString(theRequestString);
+      if (stringMap != null) {
+         String[] strings = stringMap.get(JpaConstants.PARAM_EXPORT_MDM);
+         if (strings != null) {
+            return String.join(",", strings);
+         }
+      }
+      return null;
+   }


    /**
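
To make the new helper concrete, a small sketch of what getMdmIfPresent() should return for a stored request string shaped like the one the R4 test asserts below (the group id is invented):

import java.util.Map;

import ca.uhn.fhir.util.UrlUtil;

public class MdmRequestParseExample {
   public static void main(String[] args) {
      String request = "/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=123&_mdm=true";
      Map<String, String[]> params = UrlUtil.parseQueryString(request);
      // "_mdm" is the value of JpaConstants.PARAM_EXPORT_MDM introduced by this commit.
      String[] mdmValues = params.get("_mdm");
      String mdm = mdmValues != null ? String.join(",", mdmValues) : null;
      System.out.println(mdm); // expected: "true"
   }
}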
@@ -215,7 +227,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {

    }

-   private void processJob(String theJobUuid, String theGroupId) {
+   private void processJob(String theJobUuid, String theGroupId, String theExpandMdm) {
       JobParametersBuilder parameters = new JobParametersBuilder()
          .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid)
          .addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
@@ -225,6 +237,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
       try {
          if (!StringUtils.isBlank(theGroupId)) {
             parameters.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);
+            parameters.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, theExpandMdm);
             myJobSubmitter.runJob(myGroupBulkExportJob, parameters.toJobParameters());
          } else {
             myJobSubmitter.runJob(myBulkExportJob, parameters.toJobParameters());
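
The expandMdm job parameter is presumably consumed by a step-scoped bean inside the group export job; that consumer is not part of this diff. Purely as an illustration of the usual Spring Batch late-binding idiom (the reader class and this bean are invented):

// Fragment of a hypothetical @Configuration class, shown only to illustrate
// @StepScope + #{jobParameters[...]} binding of the new "expandMdm" parameter.
@Bean
@StepScope
public GroupWithMdmItemReader groupWithMdmItemReader(
      @Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER + "']}") String theExpandMdm) {
   GroupWithMdmItemReader reader = new GroupWithMdmItemReader(); // invented class
   reader.setExpandMdm(Boolean.parseBoolean(theExpandMdm));
   return reader;
}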
@@ -284,8 +297,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
       if (theBulkDataExportOptions instanceof GroupBulkDataExportOptions) {
          GroupBulkDataExportOptions groupOptions = (GroupBulkDataExportOptions) theBulkDataExportOptions;
          requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_GROUP_ID).append("=").append(groupOptions.getGroupId().getValue());
-         //TODO GGG eventually we will support this
-         // requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(groupOptions.isMdm());
+         requestBuilder.append("&").append(JpaConstants.PARAM_EXPORT_MDM).append("=").append(groupOptions.isMdm());
       }
       String request = requestBuilder.toString();


BulkDataExportProviderTest.java

@@ -49,6 +49,8 @@ import java.util.concurrent.TimeUnit;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -306,11 +308,14 @@ public class BulkDataExportProviderTest {

       InstantType now = InstantType.now();


       Parameters input = new Parameters();
+      StringType obsTypeFilter = new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE");
       input.addParameter(JpaConstants.PARAM_EXPORT_OUTPUT_FORMAT, new StringType(Constants.CT_FHIR_NDJSON));
       input.addParameter(JpaConstants.PARAM_EXPORT_TYPE, new StringType("Observation, DiagnosticReport"));
+      input.addParameter(JpaConstants.PARAM_EXPORT_SINCE, now);
-      input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, new StringType("Observation?code=OBSCODE,DiagnosticReport?code=DRCODE"));
+      input.addParameter(JpaConstants.PARAM_EXPORT_MDM, true);
+      input.addParameter(JpaConstants.PARAM_EXPORT_TYPE_FILTER, obsTypeFilter);

       ourLog.info(myCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input));
@@ -329,11 +334,9 @@ public class BulkDataExportProviderTest {
       GroupBulkDataExportOptions options = myGroupBulkDataExportOptionsCaptor.getValue();
       assertEquals(Constants.CT_FHIR_NDJSON, options.getOutputFormat());
       assertThat(options.getResourceTypes(), containsInAnyOrder("Observation", "DiagnosticReport"));
-      //TODO GGG eventually, we will support since in group exports
-      assertThat(options.getSince(), nullValue());
-      //TODO GGG eventually, we will support filters in group exports
-      assertThat(options.getFilters(), nullValue());
+      assertThat(options.getSince(), notNullValue());
+      assertThat(options.getFilters(), notNullValue());
       assertEquals(GROUP_ID, options.getGroupId().getValue());
-      assertFalse(options.isMdm());
+      assertThat(options.isMdm(), is(equalTo(true)));
    }
 }

BulkDataExportSvcImplR4Test.java

@@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.bulk;
 import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.bulk.api.BulkDataExportOptions;
 import ca.uhn.fhir.jpa.bulk.api.GroupBulkDataExportOptions;
@@ -89,11 +90,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    private JobExplorer myJobExplorer;

    @Autowired
-   @Qualifier("bulkExportJob")
+   @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
    private Job myBulkJob;

    @Autowired
-   @Qualifier("groupBulkExportJob")
+   @Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
    private Job myGroupBulkJob;

    private IIdType myPatientGroupId;
@@ -490,7 +491,8 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
    }

    public void awaitAllBulkJobCompletions() {
-      List<JobInstance> bulkExport = myJobExplorer.findJobInstancesByJobName("bulkExportJob", 0, 100);
+      List<JobInstance> bulkExport = myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.BULK_EXPORT_JOB_NAME, 0, 100);
+      bulkExport.addAll(myJobExplorer.findJobInstancesByJobName(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME, 0, 100));
       if (bulkExport.isEmpty()) {
          fail("There are no bulk export jobs running!");
       }
@@ -615,16 +617,11 @@ public class BulkDataExportSvcImplR4Test extends BaseJpaR4Test {
       // Create a bulk job
       IBulkDataExportSvc.JobInfo jobDetails = myBulkDataExportSvc.submitJob(new GroupBulkDataExportOptions(null, Sets.newHashSet("Immunization", "Observation"), null, null, myPatientGroupId, true));

-      GroupBulkExportJobParametersBuilder paramBuilder = new GroupBulkExportJobParametersBuilder();
-      paramBuilder.setGroupId(myPatientGroupId.getIdPart());
-      paramBuilder.setMdm(true);
-      paramBuilder.setJobUUID(jobDetails.getJobId());
-      paramBuilder.setReadChunkSize(10L);
+      myBulkDataExportSvc.buildExportFiles();
+      awaitAllBulkJobCompletions();

-      JobExecution jobExecution = myBatchJobSubmitter.runJob(myGroupBulkJob, paramBuilder.toJobParameters());
-
-      awaitJobCompletion(jobExecution);
       IBulkDataExportSvc.JobInfo jobInfo = myBulkDataExportSvc.getJobInfoOrThrowResourceNotFound(jobDetails.getJobId());
+      assertEquals("/$export?_outputFormat=application%2Ffhir%2Bndjson&_type=Observation,Immunization&_groupId=" + myPatientGroupId +"&_mdm=true", jobInfo.getRequest());

       assertThat(jobInfo.getStatus(), equalTo(BulkJobStatusEnum.COMPLETE));
       assertThat(jobInfo.getFiles().size(), equalTo(2));

JpaConstants.java

@@ -190,10 +190,9 @@ public class JpaConstants {
    public static final Object PARAM_EXPORT_GROUP_ID = "_groupId";

    /**
-    * TODO GGG eventually we will support this.
     * Whether mdm should be performed on group export items to expand the group items to linked items before performing the export
     */
-   // public static final String PARAM_EXPORT_MDM = "_mdm";
+   public static final String PARAM_EXPORT_MDM = "_mdm";

    /**
     * Parameter for delete to indicate the deleted resources should also be expunged
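
With the constant un-commented, a Group-level export that requests MDM expansion looks roughly like the request string asserted in the R4 test above. A small sketch of assembling such a URL from the constants used in this diff (the base URL and group id are invented):

// Only PARAM_EXPORT_TYPE and PARAM_EXPORT_MDM come from JpaConstants as shown
// in this commit; everything else is hard-coded for illustration.
String groupExportUrl = "https://example.org/fhir/Group/123/$export"
   + "?" + JpaConstants.PARAM_EXPORT_TYPE + "=Observation,Immunization"
   + "&" + JpaConstants.PARAM_EXPORT_MDM + "=true";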