minor refactor to extract static string variables into a single constants class in bulk export (#3117)

Co-authored-by: Long Ma <longma@Longs-MBP.hitronhub.home>
Author: longma1, 2021-10-28 10:04:31 -06:00, committed by GitHub
parent 8a1e8f4b2f
commit 2067ba83ee
11 changed files with 41 additions and 32 deletions
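
The change is mechanical across all eleven files: call sites stop referencing per-job configuration classes for shared string keys and read them from the central BatchConstants class instead. A before/after sketch of the pattern, taken from the BulkDataExportSvcImpl hunk below (JobParametersBuilder is Spring Batch's standard builder):

// Before this commit: the key lives on the job's @Configuration class.
theParameters.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);

// After this commit: the key is defined once, in the shared constants class.
theParameters.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);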

BatchConstants.java

@@ -56,6 +56,23 @@ public final class BatchConstants {
 public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
 public static final String BULK_EXPORT_READ_CHUNK_PARAMETER = "readChunkSize";
 public static final String BULK_EXPORT_GROUP_ID_PARAMETER = "groupId";
+/**
+ * Job Parameters
+ */
+public static final String READ_CHUNK_PARAMETER = "readChunkSize";
+public static final String EXPAND_MDM_PARAMETER = "expandMdm";
+public static final String GROUP_ID_PARAMETER = "groupId";
+public static final String JOB_RESOURCE_TYPES_PARAMETER = "resourceTypes";
+public static final String JOB_DESCRIPTION = "jobDescription";
+public static final String JOB_SINCE_PARAMETER = "since";
+public static final String JOB_TYPE_FILTERS = "filters";
+public static final String JOB_COLLECTION_ENTITY_ID = "bulkExportCollectionEntityId";
+/**
+ * Job Execution Context
+ */
+public static final String JOB_EXECUTION_RESOURCE_TYPE = "resourceType";
 /**
  * This Set contains the step names across all job types that are appropriate for
  * someone to look at the write count for that given step in order to determine the
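
Because these fields are public static final String, the compiler folds them into the @Value SpEL expressions used at the call sites below. A minimal sketch of that pattern on a step-scoped bean (the bean and field names here are illustrative, not part of this commit):

import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Sketch: jobParameters SpEL resolves only on step- or job-scoped beans.
@Component
@StepScope
public class ChunkSizeAwareBean {
	// "readChunkSize" is baked into the SpEL string at compile time.
	@Value("#{jobParameters['" + BatchConstants.READ_CHUNK_PARAMETER + "']}")
	private Long myReadChunkSize;
}

A renamed parameter now only needs to change in BatchConstants, not at every string literal.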

GoldenResourceAnnotatingProcessor.java

@@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.batch.processor;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.fhirpath.IFhirPath;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
 import ca.uhn.fhir.util.ExtensionUtil;
 import ca.uhn.fhir.util.HapiExtensions;
@@ -57,7 +57,7 @@ public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBa
 @Autowired
 private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
-@Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER+ "'] ?: false}")
+@Value("#{jobParameters['" + BatchConstants.EXPAND_MDM_PARAMETER + "'] ?: false}")
 private boolean myMdmEnabled;
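
The trailing ?: false is SpEL's Elvis operator: if the job was submitted without an expandMdm parameter, myMdmEnabled defaults to false instead of failing injection.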

BulkExportJobConfig.java

@@ -52,13 +52,7 @@ import java.util.List;
 */
 @Configuration
 public class BulkExportJobConfig {
-public static final String READ_CHUNK_PARAMETER = "readChunkSize";
-public static final String EXPAND_MDM_PARAMETER = "expandMdm";
-public static final String GROUP_ID_PARAMETER = "groupId";
-public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
 public static final int CHUNK_SIZE = 100;
-public static final String JOB_DESCRIPTION = "jobDescription";
 @Autowired
 private FhirContext myFhirContext;

BulkExportJobParameterValidator.java

@@ -54,7 +54,7 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
 TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
 String errorMessage = txTemplate.execute(tx -> {
 StringBuilder errorBuilder = new StringBuilder();
-Long readChunkSize = theJobParameters.getLong(BulkExportJobConfig.READ_CHUNK_PARAMETER);
+Long readChunkSize = theJobParameters.getLong(BatchConstants.READ_CHUNK_PARAMETER);
 if (readChunkSize == null || readChunkSize < 1) {
 errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
 }
@@ -68,16 +68,14 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
 boolean hasExistingJob = oJob.isPresent();
 //Check for to-be-created parameters.
 if (!hasExistingJob) {
-String resourceTypes = theJobParameters.getString(BulkExportJobConfig.RESOURCE_TYPES_PARAMETER);
+String resourceTypes = theJobParameters.getString(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER);
 if (StringUtils.isBlank(resourceTypes)) {
-errorBuilder.append("You must include [").append(BulkExportJobConfig.RESOURCE_TYPES_PARAMETER).append("] as a Job Parameter");
+errorBuilder.append("You must include [").append(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER).append("] as a Job Parameter");
 } else {
 String[] resourceArray = resourceTypes.split(",");
 Arrays.stream(resourceArray).filter(resourceType -> resourceType.equalsIgnoreCase("Binary"))
 .findFirst()
-.ifPresent(resourceType -> {
-errorBuilder.append("Bulk export of Binary resources is forbidden");
-});
+.ifPresent(resourceType -> errorBuilder.append("Bulk export of Binary resources is forbidden"));
 }
 String outputFormat = theJobParameters.getString("outputFormat");
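
This class implements Spring Batch's JobParametersValidator, whose contract is:

// Spring Batch's validator contract (org.springframework.batch.core):
public interface JobParametersValidator {
	void validate(JobParameters parameters) throws JobParametersInvalidException;
}

so a non-blank errorMessage typically ends up thrown as a JobParametersInvalidException before the job launches; the actual throw site is outside this diff.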

GroupBulkExportJobParametersBuilder.java

@@ -20,14 +20,16 @@ package ca.uhn.fhir.jpa.bulk.export.job;
 * #L%
 */
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 public class GroupBulkExportJobParametersBuilder extends BulkExportJobParametersBuilder {
 public GroupBulkExportJobParametersBuilder setGroupId(String theGroupId) {
-this.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);
+this.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);
 return this;
 }
 public GroupBulkExportJobParametersBuilder setMdm(boolean theMdm) {
-this.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, String.valueOf(theMdm));
+this.addString(BatchConstants.EXPAND_MDM_PARAMETER, String.valueOf(theMdm));
 return this;
 }
 }
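
For illustration, the builder is used fluently like the JobParametersBuilder it ultimately extends (which the addString calls above imply); the group ID here is made up:

// Sketch: assembling group bulk-export parameters with the refactored keys.
JobParameters params = new GroupBulkExportJobParametersBuilder()
	.setGroupId("Group/123") // stored under BatchConstants.GROUP_ID_PARAMETER
	.setMdm(true)            // stored under BatchConstants.EXPAND_MDM_PARAMETER
	.toJobParameters();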

GroupBulkItemReader.java

@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
 import ca.uhn.fhir.jpa.dao.IResultIterator;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
@@ -68,9 +68,9 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
 private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
 public static final int QUERY_CHUNK_SIZE = 100;
-@Value("#{jobParameters['" + BulkExportJobConfig.GROUP_ID_PARAMETER + "']}")
+@Value("#{jobParameters['" + BatchConstants.GROUP_ID_PARAMETER + "']}")
 private String myGroupId;
-@Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER+ "'] ?: false}")
+@Value("#{jobParameters['" + BatchConstants.EXPAND_MDM_PARAMETER+ "'] ?: false}")
 private boolean myMdmEnabled;
 @Autowired

ResourceTypePartitioner.java

@@ -37,7 +37,7 @@ public class ResourceTypePartitioner implements Partitioner {
 private static final Logger ourLog = getLogger(ResourceTypePartitioner.class);
-@Value("#{jobExecutionContext['jobUUID']}")
+@Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER+ "']}")
 private String myJobUUID;
 @Autowired
@@ -57,12 +57,12 @@ public class ResourceTypePartitioner implements Partitioner {
 ExecutionContext context = new ExecutionContext();
 //The worker step needs to know what resource type it is looking for.
-context.putString("resourceType", resourceType);
+context.putString(BatchConstants.JOB_EXECUTION_RESOURCE_TYPE, resourceType);
 // The worker step needs to know which parent job it is processing for, and which collection entity it will be
 // attaching its results to.
 context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
-context.putLong("bulkExportCollectionEntityId", collectionEntityId);
+context.putLong(BatchConstants.JOB_COLLECTION_ENTITY_ID, collectionEntityId);
 // Name the partition based on the resource type
 partitionContextMap.put(resourceType, context);
BulkDataExportSvcImpl.java

@@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
@@ -244,7 +243,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 String theJobUuid = theBulkExportJobEntity.getJobId();
 JobParametersBuilder parameters = new JobParametersBuilder()
 .addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
-.addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
+.addLong(BatchConstants.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
 ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);
@@ -273,8 +272,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) {
 String theGroupId = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID);
 String expandMdm = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_MDM);
-theParameters.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);
-theParameters.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, expandMdm);
+theParameters.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);
+theParameters.addString(BatchConstants.EXPAND_MDM_PARAMETER, expandMdm);
 }
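
Taken together, a group export submission now draws every one of its keys from BatchConstants. An illustrative end state (the values are made up; the group and MDM entries are only added for group exports):

// Sketch: the parameter set a group bulk-export job carries after this commit.
JobParameters params = new JobParametersBuilder()
	.addString(BatchConstants.JOB_UUID_PARAMETER, "example-job-uuid") // hypothetical value
	.addLong(BatchConstants.READ_CHUNK_PARAMETER, 1000L)              // hypothetical value
	.addString(BatchConstants.GROUP_ID_PARAMETER, "Group/123")        // hypothetical value
	.addString(BatchConstants.EXPAND_MDM_PARAMETER, "true")
	.toJobParameters();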

BulkDataImportSvcImpl.java

@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
@@ -275,7 +274,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 .addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
 if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {
-parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription());
+parameters.addString(BatchConstants.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription());
 }
 ourLog.info("Submitting bulk import job {} to job scheduler", jobId);

BulkDataImportR4Test.java

@@ -6,7 +6,6 @@ import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Interceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
@@ -46,7 +45,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import javax.annotation.Nonnull;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
@@ -144,7 +142,7 @@ public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuil
 assertTrue(activateJobOutcome);
 List<JobExecution> executions = awaitAllBulkImportJobCompletion();
-assertEquals("testFlow_TransactionRows", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION));
+assertEquals("testFlow_TransactionRows", executions.get(0).getJobParameters().getString(BatchConstants.JOB_DESCRIPTION));
 runInTransaction(() -> {
 List<BulkImportJobEntity> jobs = myBulkImportJobDao.findAll();
BaseResourceToFileWriter.java

@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
 import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.parser.IParser;
@@ -51,10 +52,10 @@ public abstract class BaseResourceToFileWriter implements ItemWriter<List<IBaseR
 protected ByteArrayOutputStream myOutputStream;
-@Value("#{stepExecutionContext['bulkExportCollectionEntityId']}")
+@Value("#{stepExecutionContext['" + BatchConstants.JOB_COLLECTION_ENTITY_ID + "']}")
 protected Long myBulkExportCollectionEntityId;
-@Value("#{stepExecutionContext['resourceType']}")
+@Value("#{stepExecutionContext['" + BatchConstants.JOB_EXECUTION_RESOURCE_TYPE + "']}")
 protected String myResourceType;
 protected IFhirResourceDao myBinaryDao;
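
As with the jobParameters expressions earlier in this commit, these stepExecutionContext lookups resolve only on step-scoped beans; with the keys centralized, this writer and ResourceTypePartitioner above are guaranteed to agree on the exact context key strings.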