Mirror of https://github.com/hapifhir/hapi-fhir.git (synced 2025-02-26 08:05:05 +00:00)

Commit 125c04f846: Cleanup and fix compile warnings
Parent commit: b39e2f8909

The diff below removes compiler warnings by parameterizing raw generic types, replacing inline step construction with @Bean factory methods, deleting the ReindexGenerateRangeChunksStep subclass in favor of the generic base class, deprecating the "everything" reindex parameter, and fixing a Javadoc typo.
ProviderConstants.java

@@ -212,6 +212,7 @@ public class ProviderConstants {
 	/**
 	 * Whether all resource types should be reindexed
 	 */
+	@Deprecated(since = "7.3.4")
 	public static final String OPERATION_REINDEX_PARAM_EVERYTHING = "everything";
 
 	/**
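A quick standalone illustration of what the new @Deprecated(since = ...) marker buys (the demo classes below are hypothetical, not HAPI FHIR code): javac flags every external use site, so downstream callers learn at compile time that the "everything" parameter is on its way out.

// Hypothetical demo, not part of the commit. javac suppresses deprecation
// warnings inside the declaring outermost class, so the use site lives in
// a second top-level class.
class ReindexParams {
	@Deprecated(since = "7.3.4")
	static final String OPERATION_REINDEX_PARAM_EVERYTHING = "everything";
}

public class DeprecationDemo {
	public static void main(String[] args) {
		// warning: [deprecation] OPERATION_REINDEX_PARAM_EVERYTHING in ReindexParams has been deprecated
		System.out.println(ReindexParams.OPERATION_REINDEX_PARAM_EVERYTHING);
	}
}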
DeleteExpungeAppCtx.java

@@ -45,8 +45,8 @@ public class DeleteExpungeAppCtx {
 	public JobDefinition<DeleteExpungeJobParameters> expungeJobDefinition(
 			IBatch2DaoSvc theBatch2DaoSvc,
 			HapiTransactionService theHapiTransactionService,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
-			IIdHelperService theIdHelperService,
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
+			IIdHelperService<?> theIdHelperService,
 			IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		return JobDefinition.newBuilder()
 				.setJobDefinitionId(JOB_DELETE_EXPUNGE)
@@ -65,7 +65,7 @@ public class DeleteExpungeAppCtx {
 						"load-ids",
 						"Load IDs of resources to expunge",
 						ResourceIdListWorkChunkJson.class,
-						new LoadIdsStep(theBatch2DaoSvc))
+						loadIdsStep(theBatch2DaoSvc))
 				.addLastStep(
 						"expunge",
 						"Perform the resource expunge",
@@ -76,7 +76,7 @@ public class DeleteExpungeAppCtx {
 	@Bean
 	public DeleteExpungeJobParametersValidator expungeJobParametersValidator(
 			IBatch2DaoSvc theBatch2DaoSvc,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
 			IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		return new DeleteExpungeJobParametersValidator(
 				new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc),
@@ -84,17 +84,22 @@
 				theRequestPartitionHelperSvc);
 	}
 
+	@Bean
+	public LoadIdsStep<DeleteExpungeJobParameters> loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
+	}
+
 	@Bean
 	public DeleteExpungeStep expungeStep(
 			HapiTransactionService theHapiTransactionService,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
-			IIdHelperService theIdHelperService) {
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
+			IIdHelperService<?> theIdHelperService) {
 		return new DeleteExpungeStep(theHapiTransactionService, theDeleteExpungeSvc, theIdHelperService);
 	}
 
 	@Bean
-	public GenerateRangeChunksStep expungeGenerateRangeChunksStep() {
-		return new GenerateRangeChunksStep();
+	public GenerateRangeChunksStep<DeleteExpungeJobParameters> expungeGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
 	}
 
 	@Bean
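The <?> additions above are the core of the raw-type cleanup in this configuration class. A minimal sketch of the distinction, using a stand-in interface rather than the real IDeleteExpungeSvc:

// Stand-in interface; the real IDeleteExpungeSvc lives in HAPI FHIR.
interface DeleteSvc<T> {
	void delete(T theResource);

	int countDeleted();
}

class WildcardDemo {
	// Raw parameter type: javac reports "found raw type: DeleteSvc"
	// under -Xlint:rawtypes, the kind of warning this commit removes.
	static int useRaw(DeleteSvc theSvc) {
		return theSvc.countDeleted();
	}

	// Unbounded wildcard: accepts a DeleteSvc of any parameterization
	// without committing to one, and compiles warning-free.
	static int useWildcard(DeleteSvc<?> theSvc) {
		return theSvc.countDeleted();
	}
}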
BulkImportAppCtx.java

@@ -32,7 +32,7 @@ public class BulkImportAppCtx {
 	public static final int PARAM_MAXIMUM_BATCH_SIZE_DEFAULT = 800; // Avoid the 1000 SQL param limit
 
 	@Bean
-	public JobDefinition bulkImport2JobDefinition() {
+	public JobDefinition<BulkImportJobParameters> bulkImport2JobDefinition() {
 		return JobDefinition.newBuilder()
 				.setJobDefinitionId(JOB_BULK_IMPORT_PULL)
 				.setJobDescription("FHIR Bulk Import using pull-based data source")
ReindexAppCtx.java

@@ -20,6 +20,8 @@
 package ca.uhn.fhir.batch2.jobs.reindex;
 
 import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
@@ -57,14 +59,21 @@ public class ReindexAppCtx {
 						"load-ids",
 						"Load IDs of resources to reindex",
 						ResourceIdListWorkChunkJson.class,
-						new LoadIdsStep(theBatch2DaoSvc))
+						loadIdsStep(theBatch2DaoSvc))
 				.addLastStep("reindex", "Perform the resource reindex", reindexStep())
 				.build();
 	}
 
 	@Bean
-	public GenerateRangeChunksStep reindexGenerateRangeChunksStep() {
-		return new ReindexGenerateRangeChunksStep();
+	public IJobStepWorker<ReindexJobParameters, VoidModel, PartitionedUrlChunkRangeJson>
+			reindexGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
+	}
+
+	@Bean
+	public IJobStepWorker<ReindexJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> loadIdsStep(
+			IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
 	}
 
 	@Bean
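Both this file and DeleteExpungeAppCtx swap the inline new LoadIdsStep(theBatch2DaoSvc) for a call to a dedicated @Bean factory method. A simplified sketch of the pattern with stand-in types (the real signatures live in HAPI FHIR; bean wiring of the DAO service is omitted):

// Sketch only. Inlining "new LoadIdsStep(svc)" forces a raw type because no
// type argument is in scope at the call site; a @Bean method pins the
// argument so the diamond can infer it, and Spring owns the step bean.
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class StepConfigSketch {
	interface IBatch2DaoSvc {} // stand-in

	static class ReindexJobParameters {} // stand-in

	static class LoadIdsStep<PT> { // stand-in for the now-generic step
		LoadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {}
	}

	@Bean
	LoadIdsStep<ReindexJobParameters> loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
		// Diamond infers <ReindexJobParameters> from the declared return type.
		return new LoadIdsStep<>(theBatch2DaoSvc);
	}
}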
ReindexGenerateRangeChunksStep.java (deleted)

@@ -1,51 +0,0 @@
-/*-
- * #%L
- * hapi-fhir-storage-batch2-jobs
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.batch2.jobs.reindex;
-
-import ca.uhn.fhir.batch2.api.IJobDataSink;
-import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
-import ca.uhn.fhir.batch2.api.RunOutcome;
-import ca.uhn.fhir.batch2.api.StepExecutionDetails;
-import ca.uhn.fhir.batch2.api.VoidModel;
-import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
-import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
-import jakarta.annotation.Nonnull;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ReindexGenerateRangeChunksStep extends GenerateRangeChunksStep<ReindexJobParameters> {
-	private static final Logger ourLog = LoggerFactory.getLogger(ReindexGenerateRangeChunksStep.class);
-
-	@Nonnull
-	@Override
-	public RunOutcome run(
-			@Nonnull StepExecutionDetails<ReindexJobParameters, VoidModel> theStepExecutionDetails,
-			@Nonnull IJobDataSink<PartitionedUrlChunkRangeJson> theDataSink)
-			throws JobExecutionFailedException {
-
-		ReindexJobParameters parameters = theStepExecutionDetails.getParameters();
-		ourLog.info(
-				"Beginning reindex job - OptimizeStorage[{}] - ReindexSearchParameters[{}]",
-				parameters.getOptimizeStorage(),
-				parameters.getReindexSearchParameters());
-
-		return super.run(theStepExecutionDetails, theDataSink);
-	}
-}
LoadIdsStep.java

@@ -29,17 +29,10 @@ import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
 import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import jakarta.annotation.Nonnull;
-import org.slf4j.Logger;
-
-import static org.slf4j.LoggerFactory.getLogger;
-
-public class LoadIdsStep
-		implements IJobStepWorker<
-				PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> {
-	private static final Logger ourLog = getLogger(LoadIdsStep.class);
-
-	private final ResourceIdListStep<PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson>
-			myResourceIdListStep;
+
+public class LoadIdsStep<PT extends PartitionedUrlListJobParameters>
+		implements IJobStepWorker<PT, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> {
+	private final ResourceIdListStep<PT, PartitionedUrlChunkRangeJson> myResourceIdListStep;
 
 	public LoadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
 		IIdChunkProducer<PartitionedUrlChunkRangeJson> idChunkProducer =
@@ -50,9 +43,7 @@ public class LoadIdsStep
 	@Nonnull
 	@Override
 	public RunOutcome run(
-			@Nonnull
-			StepExecutionDetails<PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson>
-					theStepExecutionDetails,
+			@Nonnull StepExecutionDetails<PT, PartitionedUrlChunkRangeJson> theStepExecutionDetails,
 			@Nonnull IJobDataSink<ResourceIdListWorkChunkJson> theDataSink)
 			throws JobExecutionFailedException {
 		return myResourceIdListStep.run(theStepExecutionDetails, theDataSink);
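This class is what makes those factory methods possible: LoadIdsStep now takes a type parameter bounded by PartitionedUrlListJobParameters (and drops a logger that was never used). A toy sketch of why the bound matters, with stand-in types rather than the real HAPI FHIR hierarchy:

// Stand-in types; the real hierarchy lives in HAPI FHIR.
class PartitionedUrlListJobParameters {}

class ReindexJobParameters extends PartitionedUrlListJobParameters {}

class DeleteExpungeJobParameters extends PartitionedUrlListJobParameters {}

// Mirrors LoadIdsStep<PT extends PartitionedUrlListJobParameters>: one
// class now serves every job type instead of raw uses or subclasses.
class LoadIdsStep<PT extends PartitionedUrlListJobParameters> {
	PT myLastParameters;

	void remember(PT theParameters) {
		myLastParameters = theParameters; // type-safe per job
	}
}

class GenericStepDemo {
	public static void main(String[] args) {
		LoadIdsStep<ReindexJobParameters> forReindex = new LoadIdsStep<>();
		forReindex.remember(new ReindexJobParameters());
		// forReindex.remember(new DeleteExpungeJobParameters()); // compile error: wrong job type
	}
}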
ResourceIdListStep.java

@@ -45,7 +45,6 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 public class ResourceIdListStep<PT extends PartitionedJobParameters, IT extends ChunkRangeJson>
 		implements IJobStepWorker<PT, IT, ResourceIdListWorkChunkJson> {
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
-	public static final int DEFAULT_PAGE_SIZE = 20000;
 
 	protected static final int MAX_BATCH_OF_IDS = 500;
 
JobDefinition.java

@@ -256,7 +256,7 @@ public class JobDefinition<PT extends IModelJson> {
	 * Adds a processing step for this job.
	 *
	 * @param theStepId A unique identifier for this step. This only needs to be unique within the scope
-	 *                  of the individual job definition (i.e. diuplicates are fine for different jobs, or
+	 *                  of the individual job definition (i.e. duplicates are fine for different jobs, or
	 *                  even different versions of the same job)
	 * @param theStepDescription A description of this step
	 * @param theStepWorker The worker that will actually perform this step
MdmSubmitAppCtx.java

@@ -19,6 +19,8 @@
  */
 package ca.uhn.fhir.mdm.batch2.submit;
 
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
@@ -38,12 +40,12 @@ public class MdmSubmitAppCtx {
 	public static String MDM_SUBMIT_JOB = "MDM_SUBMIT";
 
 	@Bean
-	public GenerateRangeChunksStep submitGenerateRangeChunksStep() {
-		return new GenerateRangeChunksStep();
+	public GenerateRangeChunksStep<MdmSubmitJobParameters> submitGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
 	}
 
 	@Bean(name = MDM_SUBMIT_JOB_BEAN_NAME)
-	public JobDefinition mdmSubmitJobDefinition(
+	public JobDefinition<MdmSubmitJobParameters> mdmSubmitJobDefinition(
 			IBatch2DaoSvc theBatch2DaoSvc,
 			MatchUrlService theMatchUrlService,
 			FhirContext theFhirContext,
@@ -61,7 +63,7 @@
 						PartitionedUrlChunkRangeJson.class,
 						submitGenerateRangeChunksStep())
 				.addIntermediateStep(
-						"load-ids", "Load the IDs", ResourceIdListWorkChunkJson.class, new LoadIdsStep(theBatch2DaoSvc))
+						"load-ids", "Load the IDs", ResourceIdListWorkChunkJson.class, loadIdsStep(theBatch2DaoSvc))
 				.addLastStep(
 						"inflate-and-submit-resources",
 						"Inflate and Submit resources",
@@ -76,7 +78,13 @@
 	}
 
 	@Bean
-	public MdmInflateAndSubmitResourcesStep mdmInflateAndSubmitResourcesStep() {
+	public LoadIdsStep<MdmSubmitJobParameters> loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
+	}
+
+	@Bean
+	public IJobStepWorker<MdmSubmitJobParameters, ResourceIdListWorkChunkJson, VoidModel>
+			mdmInflateAndSubmitResourcesStep() {
 		return new MdmInflateAndSubmitResourcesStep();
 	}
 }
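One last pattern worth calling out: reindexGenerateRangeChunksStep() and mdmInflateAndSubmitResourcesStep() now declare their return type as the IJobStepWorker interface rather than a concrete class, which is why both config files gain the IJobStepWorker and VoidModel imports. A sketch of the idea with stand-in types (the real interface and step classes are HAPI FHIR's):

// Sketch only: exposing a step bean by its worker interface keeps the
// concrete class an implementation detail of the @Configuration class.
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
class WorkerBeanSketch {
	interface IJobStepWorker<PT, IT, OT> { // stand-in for the HAPI FHIR interface
		OT run(PT theParams, IT theInput);
	}

	static class MdmSubmitJobParameters {} // stand-in

	static class ResourceIdListWorkChunkJson {} // stand-in

	static class VoidModel {} // stand-in

	static class MdmInflateAndSubmitResourcesStep
			implements IJobStepWorker<MdmSubmitJobParameters, ResourceIdListWorkChunkJson, VoidModel> {
		@Override
		public VoidModel run(MdmSubmitJobParameters theParams, ResourceIdListWorkChunkJson theInput) {
			return new VoidModel();
		}
	}

	@Bean
	IJobStepWorker<MdmSubmitJobParameters, ResourceIdListWorkChunkJson, VoidModel>
			mdmInflateAndSubmitResourcesStep() {
		return new MdmInflateAndSubmitResourcesStep();
	}
}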