diff --git a/hapi-fhir-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/BatchConstants.java b/hapi-fhir-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/BatchConstants.java
index 6eb9d0398c4..f5b77e53211 100644
--- a/hapi-fhir-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/BatchConstants.java
+++ b/hapi-fhir-batch/src/main/java/ca/uhn/fhir/jpa/batch/config/BatchConstants.java
@@ -56,6 +56,23 @@ public final class BatchConstants {
 	public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
 	public static final String BULK_EXPORT_READ_CHUNK_PARAMETER = "readChunkSize";
 	public static final String BULK_EXPORT_GROUP_ID_PARAMETER = "groupId";
+	/**
+	 * Job parameter keys shared by the bulk export/import Spring Batch jobs. NOTE(review): READ_CHUNK_PARAMETER and GROUP_ID_PARAMETER duplicate the values of BULK_EXPORT_READ_CHUNK_PARAMETER and BULK_EXPORT_GROUP_ID_PARAMETER above — consider consolidating.
+	 */
+	public static final String READ_CHUNK_PARAMETER = "readChunkSize";
+	public static final String EXPAND_MDM_PARAMETER = "expandMdm";
+	public static final String GROUP_ID_PARAMETER = "groupId";
+	public static final String JOB_RESOURCE_TYPES_PARAMETER = "resourceTypes";
+	public static final String JOB_DESCRIPTION = "jobDescription";
+	public static final String JOB_SINCE_PARAMETER = "since";
+	public static final String JOB_TYPE_FILTERS = "filters";
+	public static final String JOB_COLLECTION_ENTITY_ID = "bulkExportCollectionEntityId";
+
+	/**
+	 * Keys stored in the Spring Batch job/step ExecutionContext (see ResourceTypePartitioner)
+	 */
+	public static final String JOB_EXECUTION_RESOURCE_TYPE = "resourceType";
+
 	/**
 	 * This Set contains the step names across all job types that are appropriate for
 	 * someone to look at the write count for that given step in order to determine the
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
index 5bc6169f675..4228bdf782c 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/processor/GoldenResourceAnnotatingProcessor.java
@@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.batch.processor;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.fhirpath.IFhirPath;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.dao.mdm.MdmExpansionCacheSvc;
 import ca.uhn.fhir.util.ExtensionUtil;
 import ca.uhn.fhir.util.HapiExtensions;
@@ -57,7 +57,7 @@ public class GoldenResourceAnnotatingProcessor implements ItemProcessor<List<IBa
 	@Autowired
 	private MdmExpansionCacheSvc myMdmExpansionCacheSvc;
 
-	@Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER+ "'] ?: false}")
+	@Value("#{jobParameters['" + BatchConstants.EXPAND_MDM_PARAMETER + "'] ?: false}")
 	private boolean myMdmEnabled;
 
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
index 34cf94f4985..0d83c0e77a6 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
@@ -52,13 +52,7 @@ import java.util.List;
  */
 @Configuration
 public class BulkExportJobConfig {
-
-	public static final String READ_CHUNK_PARAMETER = "readChunkSize";
-	public static final String EXPAND_MDM_PARAMETER = "expandMdm";
-	public static final String GROUP_ID_PARAMETER = "groupId";
-	public static final String RESOURCE_TYPES_PARAMETER = "resourceTypes";
 	public static final int CHUNK_SIZE = 100;
-	public static final String JOB_DESCRIPTION = "jobDescription";
 
 	@Autowired
 	private FhirContext myFhirContext;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
index 300a26c8c1f..296fe076d1d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
@@ -54,7 +54,7 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
 		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
 		String errorMessage = txTemplate.execute(tx -> {
 			StringBuilder errorBuilder = new StringBuilder();
-			Long readChunkSize = theJobParameters.getLong(BulkExportJobConfig.READ_CHUNK_PARAMETER);
+			Long readChunkSize = theJobParameters.getLong(BatchConstants.READ_CHUNK_PARAMETER);
 			if (readChunkSize == null || readChunkSize < 1) {
 				errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
 			}
@@ -68,16 +68,14 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
 			boolean hasExistingJob = oJob.isPresent();
 			//Check for to-be-created parameters.
 			if (!hasExistingJob) {
-				String resourceTypes = theJobParameters.getString(BulkExportJobConfig.RESOURCE_TYPES_PARAMETER);
+				String resourceTypes = theJobParameters.getString(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER);
 				if (StringUtils.isBlank(resourceTypes)) {
-					errorBuilder.append("You must include [").append(BulkExportJobConfig.RESOURCE_TYPES_PARAMETER).append("] as a Job Parameter");
+					errorBuilder.append("You must include [").append(BatchConstants.JOB_RESOURCE_TYPES_PARAMETER).append("] as a Job Parameter");
 				} else {
 					String[] resourceArray = resourceTypes.split(",");
 					Arrays.stream(resourceArray).filter(resourceType -> resourceType.equalsIgnoreCase("Binary"))
 						.findFirst()
-						.ifPresent(resourceType -> {
-							errorBuilder.append("Bulk export of Binary resources is forbidden");
-						});
+						.ifPresent(resourceType -> errorBuilder.append("Bulk export of Binary resources is forbidden"));
 				}
 
 				String outputFormat = theJobParameters.getString("outputFormat");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java
index 5d9b90f7004..a4c80fd4c39 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkExportJobParametersBuilder.java
@@ -20,14 +20,16 @@ package ca.uhn.fhir.jpa.bulk.export.job;
  * #L%
  */
 
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
+
 public class GroupBulkExportJobParametersBuilder extends BulkExportJobParametersBuilder {
 	public GroupBulkExportJobParametersBuilder setGroupId(String theGroupId) {
-		this.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);
+		this.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);
 		return this;
 	}
 
 	public GroupBulkExportJobParametersBuilder setMdm(boolean theMdm) {
-		this.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, String.valueOf(theMdm));
+		this.addString(BatchConstants.EXPAND_MDM_PARAMETER, String.valueOf(theMdm));
 		return this;
 	}
 }
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java
index d1cbe5112a8..3246d4c73ed 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/GroupBulkItemReader.java
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
 
 import ca.uhn.fhir.context.RuntimeSearchParam;
 import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
 import ca.uhn.fhir.jpa.dao.IResultIterator;
 import ca.uhn.fhir.jpa.dao.ISearchBuilder;
@@ -68,9 +69,9 @@ public class GroupBulkItemReader extends BaseJpaBulkItemReader implements ItemRe
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
 	public static final int QUERY_CHUNK_SIZE = 100;
 
-	@Value("#{jobParameters['" + BulkExportJobConfig.GROUP_ID_PARAMETER + "']}")
+	@Value("#{jobParameters['" + BatchConstants.GROUP_ID_PARAMETER + "']}")
 	private String myGroupId;
-	@Value("#{jobParameters['" + BulkExportJobConfig.EXPAND_MDM_PARAMETER+ "'] ?: false}")
+	@Value("#{jobParameters['" + BatchConstants.EXPAND_MDM_PARAMETER+ "'] ?: false}")
 	private boolean myMdmEnabled;
 
 	@Autowired
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
index f341d25ffdc..e236d3c511b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
@@ -37,7 +37,7 @@ public class ResourceTypePartitioner implements Partitioner {
 	private static final Logger ourLog = getLogger(ResourceTypePartitioner.class);
 
 
-	@Value("#{jobExecutionContext['jobUUID']}")
+	@Value("#{jobExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER+ "']}")
 	private String myJobUUID;
 
 	@Autowired
@@ -57,12 +57,12 @@ public class ResourceTypePartitioner implements Partitioner {
 
 				ExecutionContext context = new ExecutionContext();
 				//The worker step needs to know what resource type it is looking for.
-				context.putString("resourceType", resourceType);
+				context.putString(BatchConstants.JOB_EXECUTION_RESOURCE_TYPE, resourceType);
 
 				// The worker step needs to know which parent job it is processing for, and which collection entity it will be
 				// attaching its results to.
 				context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
-				context.putLong("bulkExportCollectionEntityId", collectionEntityId);
+				context.putLong(BatchConstants.JOB_COLLECTION_ENTITY_ID, collectionEntityId);
 
 				// Name the partition based on the resource type
 				partitionContextMap.put(resourceType, context);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
index 16e176c1457..5c18715cd73 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
@@ -33,7 +33,6 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionDao;
 import ca.uhn.fhir.jpa.dao.data.IBulkExportCollectionFileDao;
@@ -244,7 +243,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 		String theJobUuid = theBulkExportJobEntity.getJobId();
 		JobParametersBuilder parameters = new JobParametersBuilder()
 			.addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
-			.addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
+			.addLong(BatchConstants.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
 
 		ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);
 
@@ -273,8 +272,8 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
 	private void enhanceBulkParametersWithGroupParameters(BulkExportJobEntity theBulkExportJobEntity, JobParametersBuilder theParameters) {
 		String theGroupId = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_GROUP_ID);
 		String expandMdm  = getQueryParameterIfPresent(theBulkExportJobEntity.getRequest(), JpaConstants.PARAM_EXPORT_MDM);
-		theParameters.addString(BulkExportJobConfig.GROUP_ID_PARAMETER, theGroupId);
-		theParameters.addString(BulkExportJobConfig.EXPAND_MDM_PARAMETER, expandMdm);
+		theParameters.addString(BatchConstants.GROUP_ID_PARAMETER, theGroupId);
+		theParameters.addString(BatchConstants.EXPAND_MDM_PARAMETER, expandMdm);
 	}
 
 
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
index 3124eb43dbf..24989e88231 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
@@ -24,7 +24,6 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.job.BulkImportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
@@ -275,7 +274,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
 			.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
 
 		if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {
-			parameters.addString(BulkExportJobConfig.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription());
+			parameters.addString(BatchConstants.JOB_DESCRIPTION, theBulkExportJobEntity.getJobDescription());
 		}
 
 		ourLog.info("Submitting bulk import job {} to job scheduler", jobId);
diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java
index 66a7ffb7591..caf0fb1bd90 100644
--- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java
+++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportR4Test.java
@@ -6,7 +6,6 @@ import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
 import ca.uhn.fhir.interceptor.api.Interceptor;
 import ca.uhn.fhir.interceptor.api.Pointcut;
 import ca.uhn.fhir.jpa.batch.config.BatchConstants;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
 import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
 import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
@@ -46,7 +45,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import javax.annotation.Nonnull;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 
 import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
@@ -144,7 +142,7 @@ public class BulkDataImportR4Test extends BaseJpaR4Test implements ITestDataBuil
 		assertTrue(activateJobOutcome);
 
 		List<JobExecution> executions = awaitAllBulkImportJobCompletion();
-		assertEquals("testFlow_TransactionRows", executions.get(0).getJobParameters().getString(BulkExportJobConfig.JOB_DESCRIPTION));
+		assertEquals("testFlow_TransactionRows", executions.get(0).getJobParameters().getString(BatchConstants.JOB_DESCRIPTION));
 
 		runInTransaction(() -> {
 			List<BulkImportJobEntity> jobs = myBulkImportJobDao.findAll();
diff --git a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java
index bc36d3c8e02..30a60167b26 100644
--- a/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java
+++ b/hapi-fhir-storage/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseResourceToFileWriter.java
@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
 import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
 import ca.uhn.fhir.jpa.batch.log.Logs;
 import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
 import ca.uhn.fhir.parser.IParser;
@@ -51,10 +52,10 @@ public abstract class BaseResourceToFileWriter implements ItemWriter<List<IBaseR
 
 	protected ByteArrayOutputStream myOutputStream;
 
-	@Value("#{stepExecutionContext['bulkExportCollectionEntityId']}")
+	@Value("#{stepExecutionContext['" + BatchConstants.JOB_COLLECTION_ENTITY_ID + "']}")
 	protected Long myBulkExportCollectionEntityId;
 
-	@Value("#{stepExecutionContext['resourceType']}")
+	@Value("#{stepExecutionContext['" + BatchConstants.JOB_EXECUTION_RESOURCE_TYPE + "']}")
 	protected String myResourceType;
 
 	protected IFhirResourceDao myBinaryDao;