- Set<String> recordProcessingStepNames = new HashSet<>();
- recordProcessingStepNames.add(BULK_IMPORT_PROCESSING_STEP);
- recordProcessingStepNames.add(BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP);
- RECORD_PROCESSING_STEP_NAMES = Collections.unmodifiableSet(recordProcessingStepNames);
- }
-
- /**
- * Delete Expunge
- */
- public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
-
- /**
- * Reindex
- */
- public static final String REINDEX_JOB_NAME = "reindexJob";
-
- /**
- * Reindex Everything
- */
- public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
-
- /**
- * MDM Clear
- */
- public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
-
}
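The hunks above strip the job-name constants out of `BatchJobsConfig`; every call site below is repointed at the new `ca.uhn.fhir.jpa.batch.config.BatchConstants`. For orientation, here is a minimal sketch of the consolidated class, reconstructed only from the references in this diff — constants whose string values are not visible in any hunk are marked as assumptions:

```java
package ca.uhn.fhir.jpa.batch.config;

// Sketch reconstructed from the usages in this diff, not the actual source file.
public final class BatchConstants {
	// Job names: values visible in the deleted BatchJobsConfig lines above.
	public static final String DELETE_EXPUNGE_JOB_NAME = "deleteExpungeJob";
	public static final String REINDEX_JOB_NAME = "reindexJob";
	public static final String REINDEX_EVERYTHING_JOB_NAME = "reindexEverythingJob";
	public static final String MDM_CLEAR_JOB_NAME = "mdmClearJob";
	// Referenced by @Qualifier/@Value sites below; these string values are assumptions.
	public static final String BULK_IMPORT_JOB_NAME = "bulkImportJob";
	public static final String BULK_EXPORT_JOB_NAME = "bulkExportJob";
	public static final String GROUP_BULK_EXPORT_JOB_NAME = "groupBulkExportJob";
	public static final String PATIENT_BULK_EXPORT_JOB_NAME = "patientBulkExportJob";
	public static final String BULK_IMPORT_PROCESSING_STEP = "bulkImportProcessingStep";
	public static final String BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP = "bulkExportGenerateResourceFilesStep";
	public static final String JOB_UUID_PARAMETER = "jobUUID";
	// Reader parameter and execution-context keys: values visible in the
	// BaseReverseCronologicalBatchPidReader hunk below.
	public static final String JOB_PARAM_REQUEST_LIST = "url-list";
	public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
	public static final String JOB_PARAM_START_TIME = "start-time";
	public static final String CURRENT_URL_INDEX = "current.url-index";
	public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";

	private BatchConstants() {
	}
}
```

Because `@Qualifier` and `@Value` annotations consume these fields, they must be compile-time constants, which the `public static final String` form guarantees.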
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java
index 57b77ea52ac..69774c7ac70 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/job/MultiUrlJobParameterValidator.java
@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.batch.job;
*/
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@@ -29,8 +30,6 @@ import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersInvalidException;
import org.springframework.batch.core.JobParametersValidator;
-import static ca.uhn.fhir.jpa.batch.reader.ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST;
-
/**
* This class will prevent a job from running if any of the provided URLs are not valid on this server.
*/
@@ -50,7 +49,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
throw new JobParametersInvalidException("This job requires Parameters: [urlList]");
}
- RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(JOB_PARAM_REQUEST_LIST));
+ RequestListJson requestListJson = RequestListJson.fromJson(theJobParameters.getString(BatchConstants.JOB_PARAM_REQUEST_LIST));
for (PartitionedUrl partitionedUrl : requestListJson.getPartitionedUrls()) {
String url = partitionedUrl.getUrl();
try {
@@ -60,7 +59,7 @@ public class MultiUrlJobParameterValidator implements JobParametersValidator {
throw new JobParametersInvalidException("The resource type " + resourceName + " is not supported on this server.");
}
} catch (UnsupportedOperationException e) {
- throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
+ throw new JobParametersInvalidException("Failed to parse " + theJobParameters.getString(JOB_PARAM_OPERATION_NAME) + " " + BatchConstants.JOB_PARAM_REQUEST_LIST + " item " + url + ": " + e.getMessage());
}
}
}
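The validator plugs into the standard Spring Batch `JobParametersValidator` contract, so it runs before any step starts. A sketch of the wiring, which is not part of this diff (the job and step bean names here are illustrative):

```java
// Illustrative wiring: attach the validator via JobBuilder.validator(...), so a
// bad URL list fails the job at submission time rather than mid-step.
@Bean(name = BatchConstants.DELETE_EXPUNGE_JOB_NAME)
public Job deleteExpungeJob(JobBuilderFactory theJobBuilderFactory,
                            MultiUrlJobParameterValidator theValidator,
                            Step theDeleteExpungeStep) {
	return theJobBuilderFactory.get(BatchConstants.DELETE_EXPUNGE_JOB_NAME)
		.validator(theValidator)
		.start(theDeleteExpungeStep)
		.build();
}
```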
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/MdmClearJobSubmitterImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/MdmClearJobSubmitterImpl.java
index 90bee86b963..246bebac7c4 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/MdmClearJobSubmitterImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/MdmClearJobSubmitterImpl.java
@@ -24,8 +24,8 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
import ca.uhn.fhir.jpa.batch.mdm.job.ReverseCronologicalBatchMdmLinkPidReader;
@@ -55,7 +55,7 @@ public class MdmClearJobSubmitterImpl implements IMdmClearJobSubmitter {
@Autowired
private IBatchJobSubmitter myBatchJobSubmitter;
@Autowired
- @Qualifier(BatchJobsConfig.MDM_CLEAR_JOB_NAME)
+ @Qualifier(BatchConstants.MDM_CLEAR_JOB_NAME)
private Job myMdmClearJob;
@Override
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/job/MdmClearJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/job/MdmClearJobConfig.java
index b2786f7cc84..9f8f339191b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/job/MdmClearJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/mdm/job/MdmClearJobConfig.java
@@ -41,7 +41,7 @@ import org.springframework.context.annotation.Lazy;
import java.util.ArrayList;
import java.util.List;
-import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.MDM_CLEAR_JOB_NAME;
+import static ca.uhn.fhir.jpa.batch.config.BatchConstants.MDM_CLEAR_JOB_NAME;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BaseReverseCronologicalBatchPidReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BaseReverseCronologicalBatchPidReader.java
index 46c5e91c22f..d32a3c6cae3 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BaseReverseCronologicalBatchPidReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/batch/reader/BaseReverseCronologicalBatchPidReader.java
@@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.batch.CommonBatchJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.job.MultiUrlJobParameterValidator;
import ca.uhn.fhir.jpa.batch.job.model.PartitionedUrl;
import ca.uhn.fhir.jpa.batch.job.model.RequestListJson;
@@ -58,22 +59,17 @@ import java.util.function.Function;
/**
* This Spring Batch reader takes 4 parameters:
- * {@link #JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
- * {@link #JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If ommitted, {@link DaoConfig#getExpungeBatchSize} will be used.
- * {@link #JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
+ * {@link BatchConstants#JOB_PARAM_REQUEST_LIST}: A list of URLs to search for along with the partitions those searches should be performed on
+ * {@link BatchConstants#JOB_PARAM_BATCH_SIZE}: The number of resources to return with each search. If omitted, {@link DaoConfig#getExpungeBatchSize} will be used.
+ * {@link BatchConstants#JOB_PARAM_START_TIME}: The latest timestamp of entities to search for
*
- * The reader will return at most {@link #JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
+ * The reader will return at most {@link BatchConstants#JOB_PARAM_BATCH_SIZE} pids every time it is called, or null
* once no more matching entities are available. It returns the resources in reverse chronological order
- * and stores where it's at in the Spring Batch execution context with the key {@link #CURRENT_THRESHOLD_HIGH}
+ * and stores where it's at in the Spring Batch execution context with the key {@link BatchConstants#CURRENT_THRESHOLD_HIGH}
* appended with "." and the index number of the url list item it has gotten up to. This is to permit
* restarting jobs that use this reader so it can pick up where it left off.
*/
public abstract class BaseReverseCronologicalBatchPidReader implements ItemReader<List<Long>>, ItemStream {
- public static final String JOB_PARAM_REQUEST_LIST = "url-list";
- public static final String JOB_PARAM_BATCH_SIZE = "batch-size";
- public static final String JOB_PARAM_START_TIME = "start-time";
- public static final String CURRENT_URL_INDEX = "current.url-index";
- public static final String CURRENT_THRESHOLD_HIGH = "current.threshold-high";
private static final Logger ourLog = LoggerFactory.getLogger(ReverseCronologicalBatchResourcePidReader.class);
private final BatchDateThresholdUpdater myBatchDateThresholdUpdater = new BatchDateThresholdUpdater();
private final Map<Integer, Date> myThresholdHighByUrlIndex = new HashMap<>();
@@ -88,30 +84,30 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
private Date myStartTime;
private static String highKey(int theIndex) {
- return CURRENT_THRESHOLD_HIGH + "." + theIndex;
+ return BatchConstants.CURRENT_THRESHOLD_HIGH + "." + theIndex;
}
@Nonnull
public static JobParameters buildJobParameters(String theOperationName, Integer theBatchSize, RequestListJson theRequestListJson) {
Map<String, JobParameter> map = new HashMap<>();
map.put(MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME, new JobParameter(theOperationName));
- map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
- map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
+ map.put(BatchConstants.JOB_PARAM_REQUEST_LIST, new JobParameter(theRequestListJson.toJson()));
+ map.put(BatchConstants.JOB_PARAM_START_TIME, new JobParameter(DateUtils.addMinutes(new Date(), CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM)));
if (theBatchSize != null) {
- map.put(ReverseCronologicalBatchResourcePidReader.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
+ map.put(BatchConstants.JOB_PARAM_BATCH_SIZE, new JobParameter(theBatchSize.longValue()));
}
JobParameters parameters = new JobParameters(map);
return parameters;
}
@Autowired
- public void setRequestListJson(@Value("#{jobParameters['" + JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
+ public void setRequestListJson(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_REQUEST_LIST + "']}") String theRequestListJson) {
RequestListJson requestListJson = RequestListJson.fromJson(theRequestListJson);
myPartitionedUrls = requestListJson.getPartitionedUrls();
}
@Autowired
- public void setStartTime(@Value("#{jobParameters['" + JOB_PARAM_START_TIME + "']}") Date theStartTime) {
+ public void setStartTime(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_START_TIME + "']}") Date theStartTime) {
myStartTime = theStartTime;
}
@@ -166,8 +162,8 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override
public void open(ExecutionContext executionContext) throws ItemStreamException {
- if (executionContext.containsKey(CURRENT_URL_INDEX)) {
- myUrlIndex = new Long(executionContext.getLong(CURRENT_URL_INDEX)).intValue();
+ if (executionContext.containsKey(BatchConstants.CURRENT_URL_INDEX)) {
+ myUrlIndex = (int) executionContext.getLong(BatchConstants.CURRENT_URL_INDEX);
}
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
String key = highKey(index);
@@ -181,7 +177,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
@Override
public void update(ExecutionContext executionContext) throws ItemStreamException {
- executionContext.putLong(CURRENT_URL_INDEX, myUrlIndex);
+ executionContext.putLong(BatchConstants.CURRENT_URL_INDEX, myUrlIndex);
for (int index = 0; index < myPartitionedUrls.size(); ++index) {
Date date = myThresholdHighByUrlIndex.get(index);
if (date != null) {
@@ -199,7 +195,7 @@ public abstract class BaseReverseCronologicalBatchPidReader implements ItemReade
}
@Autowired
- public void setBatchSize(@Value("#{jobParameters['" + JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
+ public void setBatchSize(@Value("#{jobParameters['" + BatchConstants.JOB_PARAM_BATCH_SIZE + "']}") Integer theBatchSize) {
myBatchSize = theBatchSize;
}
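With the parameter keys now on `BatchConstants`, callers construct this reader's input through the `buildJobParameters` helper shown above. A usage sketch; the URL-list JSON (`theUrlListJson`) is an assumed input:

```java
// Sketch: build the JobParameters this reader consumes, per the helper above.
RequestListJson requestList = RequestListJson.fromJson(theUrlListJson);
JobParameters params = BaseReverseCronologicalBatchPidReader.buildJobParameters(
	"DELETE",     // stored under MultiUrlJobParameterValidator.JOB_PARAM_OPERATION_NAME; value illustrative
	1000,         // stored under BatchConstants.JOB_PARAM_BATCH_SIZE; null falls back to DaoConfig#getExpungeBatchSize
	requestList); // serialized under BatchConstants.JOB_PARAM_REQUEST_LIST
// BatchConstants.JOB_PARAM_START_TIME is filled in automatically, a few minutes in
// the future per CommonBatchJobConfig.MINUTES_IN_FUTURE_TO_PROCESS_FROM.
```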
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java
index 7f934cfb248..6f647bd0e58 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BaseBulkItemReader.java
@@ -25,6 +25,7 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
@@ -57,7 +58,7 @@ public abstract class BaseBulkItemReader implements ItemReader<List<ResourcePersistentId>>
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobConfig.java
.<List<ResourcePersistentId>, List<IBaseResource>>chunk(CHUNK_SIZE) //1000 resources per generated file, as the reader returns 10 resources at a time.
.reader(bulkItemReader())
.processor(myPidToIBaseResourceProcessor)
@@ -217,7 +216,7 @@ public class BulkExportJobConfig {
@Bean
public Step bulkExportPartitionStep() {
return myStepBuilderFactory.get("partitionStep")
- .partitioner(BatchJobsConfig.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
+ .partitioner(BatchConstants.BULK_EXPORT_GENERATE_RESOURCE_FILES_STEP, bulkExportResourceTypePartitioner())
.step(bulkExportGenerateResourceFilesStep())
.build();
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
index 64d06052d43..300a26c8c1f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/BulkExportJobParameterValidator.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkExportJobDao;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.rest.api.Constants;
@@ -34,8 +35,6 @@ import org.springframework.transaction.support.TransactionTemplate;
import java.util.Arrays;
import java.util.Optional;
-import static org.slf4j.LoggerFactory.getLogger;
-
/**
* This class will prevent a job from running if the UUID does not exist or is invalid.
*/
@@ -59,7 +58,7 @@ public class BulkExportJobParameterValidator implements JobParametersValidator {
if (readChunkSize == null || readChunkSize < 1) {
errorBuilder.append("There must be a valid number for readChunkSize, which is at least 1. ");
}
- String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+ String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkExportJobEntity> oJob = myBulkExportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: " + jobUUID + ". ");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java
index 9c10bcecba8..8be17cb3b4d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/CreateBulkExportEntityTasklet.java
@@ -20,10 +20,11 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.Constants;
+import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import org.apache.commons.lang3.StringUtils;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
@@ -42,18 +43,27 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
@Autowired
private IBulkDataExportSvc myBulkDataExportSvc;
+ public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
+ theChunkContext
+ .getStepContext()
+ .getStepExecution()
+ .getJobExecution()
+ .getExecutionContext()
+ .putString(BatchConstants.JOB_UUID_PARAMETER, theJobUUID);
+ }
+
@Override
public RepeatStatus execute(StepContribution theStepContribution, ChunkContext theChunkContext) throws Exception {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job.
- if (jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER)) {
- addUUIDToJobContext(theChunkContext, (String)jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
+ if (jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER)) {
+ addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED;
} else {
- String resourceTypes = (String)jobParameters.get("resourceTypes");
- Date since = (Date)jobParameters.get("since");
- String filters = (String)jobParameters.get("filters");
+ String resourceTypes = (String) jobParameters.get("resourceTypes");
+ Date since = (Date) jobParameters.get("since");
+ String filters = (String) jobParameters.get("filters");
Set<String> filterSet;
if (StringUtils.isBlank(filters)) {
filterSet = null;
@@ -86,13 +96,4 @@ public class CreateBulkExportEntityTasklet implements Tasklet {
return RepeatStatus.FINISHED;
}
}
-
- public static void addUUIDToJobContext(ChunkContext theChunkContext, String theJobUUID) {
- theChunkContext
- .getStepContext()
- .getStepExecution()
- .getJobExecution()
- .getExecutionContext()
- .putString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUUID);
- }
}
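Hoisting `addUUIDToJobContext` above `execute(...)` also makes the shared usage clearer: the import-side tasklet further down calls the same static helper. It writes into the *job* execution context, which is what makes the UUID visible to every later step:

```java
// Call-site pattern shared by both tasklets: the UUID lands in the JobExecution's
// ExecutionContext under BatchConstants.JOB_UUID_PARAMETER, where partitioners,
// listeners, and step-scoped beans can read it back.
CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, jobUuid);
```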
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
index 7eb612d2211..f341d25ffdc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/job/ResourceTypePartitioner.java
@@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.bulk.export.job;
* #L%
*/
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.svc.BulkExportDaoSvc;
import org.slf4j.Logger;
import org.springframework.batch.core.partition.support.Partitioner;
@@ -60,7 +61,7 @@ public class ResourceTypePartitioner implements Partitioner {
// The worker step needs to know which parent job it is processing for, and which collection entity it will be
// attaching its results to.
- context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
+ context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putLong("bulkExportCollectionEntityId", collectionEntityId);
// Name the partition based on the resource type
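Each partition's `ExecutionContext` therefore carries the job UUID, the collection entity id, and a resource-type-based partition name. On the consuming side, step-scoped beans read those values back with SpEL — the same pattern the bulk-import reader and writer below use:

```java
// Sketch of the consuming side: step-scoped fields bound to the partition's
// ExecutionContext entries written above (field names illustrative).
@Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;

@Value("#{stepExecutionContext['bulkExportCollectionEntityId']}")
private Long myCollectionEntityId;
```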
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
index ab862ce44f2..2caa72135bc 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/export/svc/BulkDataExportSvcImpl.java
@@ -30,9 +30,8 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
-import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
@@ -49,6 +48,7 @@ import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
+import ca.uhn.fhir.rest.api.server.bulk.BulkDataExportOptions;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@@ -114,15 +114,15 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private IBatchJobSubmitter myJobSubmitter;
@Autowired
- @Qualifier(BatchJobsConfig.BULK_EXPORT_JOB_NAME)
+ @Qualifier(BatchConstants.BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkExportJob;
@Autowired
- @Qualifier(BatchJobsConfig.GROUP_BULK_EXPORT_JOB_NAME)
+ @Qualifier(BatchConstants.GROUP_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myGroupBulkExportJob;
@Autowired
- @Qualifier(BatchJobsConfig.PATIENT_BULK_EXPORT_JOB_NAME)
+ @Qualifier(BatchConstants.PATIENT_BULK_EXPORT_JOB_NAME)
private org.springframework.batch.core.Job myPatientBulkExportJob;
private Set<String> myCompartmentResources;
@@ -243,7 +243,7 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
private void processJob(BulkExportJobEntity theBulkExportJobEntity) {
String theJobUuid = theBulkExportJobEntity.getJobId();
JobParametersBuilder parameters = new JobParametersBuilder()
- .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, theJobUuid)
+ .addString(BatchConstants.JOB_UUID_PARAMETER, theJobUuid)
.addLong(BulkExportJobConfig.READ_CHUNK_PARAMETER, READ_CHUNK_SIZE);
ourLog.info("Submitting bulk export job {} to job scheduler", theJobUuid);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java
index 52520edf6b3..994aea3796e 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/ActivateBulkImportEntityStepListener.java
@@ -20,10 +20,9 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
-import org.elasticsearch.client.enrich.ExecutePolicyResponse;
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
@@ -39,7 +38,7 @@ public class ActivateBulkImportEntityStepListener implements StepExecutionListen
@Override
public void beforeStep(StepExecution theStepExecution) {
- String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+ String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid != null) {
myBulkImportDaoSvc.setJobToStatus(jobUuid, BulkImportJobStatusEnum.RUNNING);
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java
index c16e7c3f67c..c1e4bec40fd 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileReader.java
@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/
import ca.uhn.fhir.context.FhirContext;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobFileJson;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
@@ -42,7 +42,7 @@ public class BulkImportFileReader implements ItemReader<ParsedBulkImportRecord>
private IBulkDataImportSvc myBulkDataImportSvc;
@Autowired
private FhirContext myFhirContext;
- @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
+ @Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java
index 37a49ca2a95..5edc7ef9e23 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportFileWriter.java
@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.JobFileRowProcessingModeEnum;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
@@ -39,7 +39,7 @@ import java.util.List;
public class BulkImportFileWriter implements ItemWriter<ParsedBulkImportRecord> {
private static final Logger ourLog = LoggerFactory.getLogger(BulkImportFileWriter.class);
- @Value("#{stepExecutionContext['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
+ @Value("#{stepExecutionContext['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUuid;
@Value("#{stepExecutionContext['" + BulkImportPartitioner.FILE_INDEX + "']}")
private int myFileIndex;
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java
index 504874e327d..3866829bf16 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobCloser.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.BatchStatus;
@@ -36,7 +36,7 @@ import org.springframework.beans.factory.annotation.Value;
*/
public class BulkImportJobCloser implements Tasklet {
- @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
+ @Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java
index b235c81ea2b..019b5db8ed1 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobConfig.java
@@ -21,14 +21,13 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.batch.BatchConstants;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.model.ParsedBulkImportRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobParametersValidator;
import org.springframework.batch.core.Step;
-import org.springframework.batch.core.configuration.JobRegistry;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.JobScope;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
@@ -50,8 +49,8 @@ import org.springframework.retry.policy.TimeoutRetryPolicy;
import javax.batch.api.chunk.listener.RetryProcessListener;
-import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_JOB_NAME;
-import static ca.uhn.fhir.jpa.batch.BatchJobsConfig.BULK_IMPORT_PROCESSING_STEP;
+import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_JOB_NAME;
+import static ca.uhn.fhir.jpa.batch.config.BatchConstants.BULK_IMPORT_PROCESSING_STEP;
/**
* Spring batch Job configuration file. Contains all necessary plumbing to run a
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java
index a46405fec31..c26673665ed 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportJobParameterValidator.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.dao.data.IBulkImportJobDao;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import org.apache.commons.lang3.StringUtils;
@@ -52,7 +52,7 @@ public class BulkImportJobParameterValidator implements JobParametersValidator {
TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
String errorMessage = txTemplate.execute(tx -> {
StringBuilder errorBuilder = new StringBuilder();
- String jobUUID = theJobParameters.getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+ String jobUUID = theJobParameters.getString(BatchConstants.JOB_UUID_PARAMETER);
Optional<BulkImportJobEntity> oJob = myBulkImportJobDao.findByJobId(jobUUID);
if (!StringUtils.isBlank(jobUUID) && !oJob.isPresent()) {
errorBuilder.append("There is no persisted job that exists with UUID: ");
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java
index 6a88cfbecab..6df34a9571d 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportPartitioner.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobJson;
import org.slf4j.Logger;
@@ -43,7 +43,7 @@ public class BulkImportPartitioner implements Partitioner {
private static final Logger ourLog = getLogger(BulkImportPartitioner.class);
- @Value("#{jobParameters['" + BulkExportJobConfig.JOB_UUID_PARAMETER + "']}")
+ @Value("#{jobParameters['" + BatchConstants.JOB_UUID_PARAMETER + "']}")
private String myJobUUID;
@Autowired
@@ -61,7 +61,7 @@ public class BulkImportPartitioner implements Partitioner {
String fileDescription = myBulkDataImportSvc.getFileDescription(myJobUUID, i);
ExecutionContext context = new ExecutionContext();
- context.putString(BulkExportJobConfig.JOB_UUID_PARAMETER, myJobUUID);
+ context.putString(BatchConstants.JOB_UUID_PARAMETER, myJobUUID);
context.putInt(FILE_INDEX, i);
context.put(ROW_PROCESSING_MODE, job.getProcessingMode());
context.put(JOB_DESCRIPTION, job.getJobDescription());
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java
index 2861780a4ff..014ec74a92b 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/BulkImportStepListener.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
import ca.uhn.fhir.jpa.bulk.imprt.model.BulkImportJobStatusEnum;
import org.springframework.batch.core.ExitStatus;
@@ -52,9 +52,9 @@ public class BulkImportStepListener implements StepExecutionListener, RetryListe
public ExitStatus afterStep(StepExecution theStepExecution) {
if (theStepExecution.getExitStatus().getExitCode().equals(ExitStatus.FAILED.getExitCode())) {
//Try to fetch it from the parameters first, and if it doesn't exist, fetch it from the context.
- String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+ String jobUuid = theStepExecution.getJobExecution().getJobParameters().getString(BatchConstants.JOB_UUID_PARAMETER);
if (jobUuid == null) {
- jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BulkExportJobConfig.JOB_UUID_PARAMETER);
+ jobUuid = theStepExecution.getJobExecution().getExecutionContext().getString(BatchConstants.JOB_UUID_PARAMETER);
}
assert isNotBlank(jobUuid);
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java
index c543ba4961f..2d93b98c98f 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/job/CreateBulkImportEntityTasklet.java
@@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.bulk.imprt.job;
* #L%
*/
-import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.bulk.export.job.CreateBulkExportEntityTasklet;
import ca.uhn.fhir.util.ValidateUtil;
import org.springframework.batch.core.StepContribution;
@@ -37,8 +37,8 @@ public class CreateBulkImportEntityTasklet implements Tasklet {
Map<String, Object> jobParameters = theChunkContext.getStepContext().getJobParameters();
//We can leave early if they provided us with an existing job.
- ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BulkExportJobConfig.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
- CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BulkExportJobConfig.JOB_UUID_PARAMETER));
+ ValidateUtil.isTrueOrThrowInvalidRequest(jobParameters.containsKey(BatchConstants.JOB_UUID_PARAMETER), "Job doesn't have a UUID");
+ CreateBulkExportEntityTasklet.addUUIDToJobContext(theChunkContext, (String) jobParameters.get(BatchConstants.JOB_UUID_PARAMETER));
return RepeatStatus.FINISHED;
}
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
index 7c162ea8aca..3124eb43dbf 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/imprt/svc/BulkDataImportSvcImpl.java
@@ -21,8 +21,8 @@ package ca.uhn.fhir.jpa.bulk.imprt.svc;
*/
import ca.uhn.fhir.jpa.api.config.DaoConfig;
-import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.export.job.BulkExportJobConfig;
import ca.uhn.fhir.jpa.bulk.imprt.api.IBulkDataImportSvc;
@@ -79,7 +79,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
@Autowired
private IBatchJobSubmitter myJobSubmitter;
@Autowired
- @Qualifier(BatchJobsConfig.BULK_IMPORT_JOB_NAME)
+ @Qualifier(BatchConstants.BULK_IMPORT_JOB_NAME)
private org.springframework.batch.core.Job myBulkImportJob;
@Autowired
private DaoConfig myDaoConfig;
@@ -271,7 +271,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
ValidateUtil.isTrueOrThrowInvalidRequest(batchSize > 0, "Batch size must be positive");
JobParametersBuilder parameters = new JobParametersBuilder()
- .addString(BulkExportJobConfig.JOB_UUID_PARAMETER, jobId)
+ .addString(BatchConstants.JOB_UUID_PARAMETER, jobId)
.addLong(BulkImportJobConfig.JOB_PARAM_COMMIT_INTERVAL, (long) batchSize);
if (isNotBlank(theBulkExportJobEntity.getJobDescription())) {
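Once the parameters are built, the service hands the job to the shared submitter. A sketch of the submission call; the `runJob` signature is assumed from how `IBatchJobSubmitter` is used alongside the `@Qualifier`-injected job beans above:

```java
// Sketch: submit the bulk import job with the parameters assembled above.
JobExecution execution = myJobSubmitter.runJob(myBulkImportJob, parameters.toJobParameters());
ourLog.info("Submitted bulk import job, execution id {}", execution.getId());
```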
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
index 717e5c3aa0e..f2a6ccfe046 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java
@@ -11,9 +11,9 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
-import ca.uhn.fhir.jpa.batch.BatchConstants;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
+import ca.uhn.fhir.jpa.batch.config.BatchConstants;
import ca.uhn.fhir.jpa.batch.config.NonPersistedBatchConfigurer;
import ca.uhn.fhir.jpa.batch.job.PartitionedUrlValidator;
import ca.uhn.fhir.jpa.batch.mdm.MdmBatchJobSubmitterFactoryImpl;
@@ -380,17 +380,6 @@ public abstract class BaseConfig {
return new TermConceptMappingSvcImpl();
}
- @Bean
- public ThreadPoolTaskExecutor searchCoordinatorThreadFactory() {
- final ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
- threadPoolTaskExecutor.setThreadNamePrefix("search_coord_");
- threadPoolTaskExecutor.setCorePoolSize(searchCoordCorePoolSize);
- threadPoolTaskExecutor.setMaxPoolSize(searchCoordMaxPoolSize);
- threadPoolTaskExecutor.setQueueCapacity(searchCoordQueueCapacity);
- threadPoolTaskExecutor.initialize();
- return threadPoolTaskExecutor;
- }
-
@Bean
public TaskScheduler taskScheduler() {
ConcurrentTaskScheduler retVal = new ConcurrentTaskScheduler();
@@ -851,8 +840,8 @@ public abstract class BaseConfig {
}
@Bean
- public ISearchCoordinatorSvc searchCoordinatorSvc(ThreadPoolTaskExecutor searchCoordinatorThreadFactory) {
- return new SearchCoordinatorSvcImpl(searchCoordinatorThreadFactory);
+ public ISearchCoordinatorSvc searchCoordinatorSvc() {
+ return new SearchCoordinatorSvcImpl();
}
@Bean
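With the `searchCoordinatorThreadFactory` bean gone, `SearchCoordinatorSvcImpl` no longer takes its executor from the Spring context. Presumably it now builds an equivalent pool internally; that hunk is not part of this diff, but the deleted bean body above shows what such setup amounts to:

```java
// What the removed bean configured (assumption: something equivalent now lives
// inside SearchCoordinatorSvcImpl itself, outside the hunks shown here).
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setThreadNamePrefix("search_coord_");
executor.setCorePoolSize(searchCoordCorePoolSize);
executor.setMaxPoolSize(searchCoordMaxPoolSize);
executor.setQueueCapacity(searchCoordQueueCapacity);
executor.initialize();
```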
diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
index d40d555a8de..a5830be5107 100644
--- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
+++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseTransactionProcessor.java
@@ -35,6 +35,7 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteConflict;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
+import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
@@ -44,7 +45,9 @@ import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
+import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryMatchResult;
import ca.uhn.fhir.jpa.searchparam.matcher.InMemoryResourceMatcher;
+import ca.uhn.fhir.jpa.searchparam.matcher.SearchParamMatcher;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.parser.DataFormatException;
@@ -64,6 +67,7 @@ import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.MethodNotAllowedException;
import ca.uhn.fhir.rest.server.exceptions.NotModifiedException;
import ca.uhn.fhir.rest.server.exceptions.PayloadTooLargeException;
+import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.method.BaseMethodBinding;
import ca.uhn.fhir.rest.server.method.BaseResourceReturningMethodBinding;
@@ -80,6 +84,7 @@ import ca.uhn.fhir.util.UrlUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.exceptions.FHIRException;
@@ -156,6 +161,8 @@ public abstract class BaseTransactionProcessor {
private ModelConfig myModelConfig;
@Autowired
private InMemoryResourceMatcher myInMemoryResourceMatcher;
+ @Autowired
+ private SearchParamMatcher mySearchParamMatcher;
private TaskExecutor myExecutor ;
@@ -274,7 +281,9 @@ public abstract class BaseTransactionProcessor {
idSubstitutions.put(id, newId);
}
}
- idToPersistedOutcome.put(newId, outcome);
+
+ populateIdToPersistedOutcomeMap(idToPersistedOutcome, newId, outcome);
+
if (outcome.getCreated()) {
myVersionAdapter.setResponseStatus(newEntry, toStatusString(Constants.STATUS_HTTP_201_CREATED));
} else {
@@ -298,6 +307,21 @@ public abstract class BaseTransactionProcessor {
}
+ /** Populates an entry in idToPersistedOutcome.
+ * Stores whatever outcome is passed in, unless the key already exists; in that case the existing entry is only
+ * replaced if the incoming outcome is non-lazy. This makes later evaluation easier, because we _know_ these outcomes must be accessible.
+ */
+ private void populateIdToPersistedOutcomeMap(Map<IIdType, DaoMethodOutcome> idToPersistedOutcome, IIdType newId, DaoMethodOutcome outcome) {
+ //Prefer real method outcomes over lazy ones.
+ if (idToPersistedOutcome.containsKey(newId)) {
+ if (!(outcome instanceof LazyDaoMethodOutcome)) {
+ idToPersistedOutcome.put(newId, outcome);
+ }
+ } else {
+ idToPersistedOutcome.put(newId, outcome);
+ }
+ }
+
private Date getLastModified(IBaseResource theRes) {
return theRes.getMeta().getLastUpdated();
}
@@ -481,7 +505,7 @@ public abstract class BaseTransactionProcessor {
entries.sort(new TransactionSorter(placeholderIds));
// perform all writes
- doTransactionWriteOperations(theRequestDetails, theActionName,
+ prepareThenExecuteTransactionWriteOperations(theRequestDetails, theActionName,
transactionDetails, transactionStopWatch,
response, originalRequestOrder, entries);
@@ -584,38 +608,15 @@ public abstract class BaseTransactionProcessor {
* heavy load with lots of concurrent transactions using all available
* database connections.
*/
- private void doTransactionWriteOperations(RequestDetails theRequestDetails, String theActionName,
- TransactionDetails theTransactionDetails, StopWatch theTransactionStopWatch,
- IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
- List<IBase> theEntries) {
+ private void prepareThenExecuteTransactionWriteOperations(RequestDetails theRequestDetails, String theActionName,
+ TransactionDetails theTransactionDetails, StopWatch theTransactionStopWatch,
+ IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder,
+ List<IBase> theEntries) {
+
TransactionWriteOperationsDetails writeOperationsDetails = null;
- if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, myInterceptorBroadcaster, theRequestDetails) ||
- CompositeInterceptorBroadcaster.hasHooks(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_POST, myInterceptorBroadcaster, theRequestDetails)) {
-
- List<String> updateRequestUrls = new ArrayList<>();
- List<String> conditionalCreateRequestUrls = new ArrayList<>();
- for (IBase nextEntry : theEntries) {
- String method = myVersionAdapter.getEntryRequestVerb(myContext, nextEntry);
- if ("PUT".equals(method)) {
- String requestUrl = myVersionAdapter.getEntryRequestUrl(nextEntry);
- if (isNotBlank(requestUrl)) {
- updateRequestUrls.add(requestUrl);
- }
- } else if ("POST".equals(method)) {
- String requestUrl = myVersionAdapter.getEntryRequestIfNoneExist(nextEntry);
- if (isNotBlank(requestUrl) && requestUrl.contains("?")) {
- conditionalCreateRequestUrls.add(requestUrl);
- }
- }
- }
-
- writeOperationsDetails = new TransactionWriteOperationsDetails();
- writeOperationsDetails.setUpdateRequestUrls(updateRequestUrls);
- writeOperationsDetails.setConditionalCreateRequestUrls(conditionalCreateRequestUrls);
- HookParams params = new HookParams()
- .add(TransactionDetails.class, theTransactionDetails)
- .add(TransactionWriteOperationsDetails.class, writeOperationsDetails);
- CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, params);
+ if (haveWriteOperationsHooks(theRequestDetails)) {
+ writeOperationsDetails = buildWriteOperationsDetails(theEntries);
+ callWriteOperationsHook(Pointcut.STORAGE_TRANSACTION_WRITE_OPERATIONS_PRE, theRequestDetails, theTransactionDetails, writeOperationsDetails);
}
TransactionCallback