Cleanup and fix compile warnings

Martha 2024-06-13 15:05:54 -07:00
parent b39e2f8909
commit 125c04f846
9 changed files with 45 additions and 83 deletions

View File

@@ -212,6 +212,7 @@ public class ProviderConstants {
 	/**
 	 * Whether all resource types should be reindexed
 	 */
+	@Deprecated(since = "7.3.4")
 	public static final String OPERATION_REINDEX_PARAM_EVERYTHING = "everything";
 
 	/**
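
Note: the since attribute (available since Java 9) only records the version in which the member was deprecated; it does not change behavior by itself. Any caller referencing the constant now gets a standard compile-time deprecation warning. A minimal sketch (the caller below is hypothetical, not part of this commit):

    // Hypothetical caller, shown only to illustrate the warning this produces.
    String everything = ProviderConstants.OPERATION_REINDEX_PARAM_EVERYTHING;
    // javac: warning: [deprecation] OPERATION_REINDEX_PARAM_EVERYTHING in ProviderConstants has been deprecated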

View File

@@ -45,8 +45,8 @@ public class DeleteExpungeAppCtx {
 	public JobDefinition<DeleteExpungeJobParameters> expungeJobDefinition(
 			IBatch2DaoSvc theBatch2DaoSvc,
 			HapiTransactionService theHapiTransactionService,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
-			IIdHelperService theIdHelperService,
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
+			IIdHelperService<?> theIdHelperService,
 			IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		return JobDefinition.newBuilder()
 				.setJobDefinitionId(JOB_DELETE_EXPUNGE)
@@ -65,7 +65,7 @@ public class DeleteExpungeAppCtx {
 						"load-ids",
 						"Load IDs of resources to expunge",
 						ResourceIdListWorkChunkJson.class,
-						new LoadIdsStep(theBatch2DaoSvc))
+						loadIdsStep(theBatch2DaoSvc))
 				.addLastStep(
 						"expunge",
 						"Perform the resource expunge",
@@ -76,7 +76,7 @@ public class DeleteExpungeAppCtx {
 	@Bean
 	public DeleteExpungeJobParametersValidator expungeJobParametersValidator(
 			IBatch2DaoSvc theBatch2DaoSvc,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
 			IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
 		return new DeleteExpungeJobParametersValidator(
 				new UrlListValidator(ProviderConstants.OPERATION_EXPUNGE, theBatch2DaoSvc),
@@ -84,17 +84,22 @@ public class DeleteExpungeAppCtx {
 				theRequestPartitionHelperSvc);
 	}
 
+	@Bean
+	public LoadIdsStep<DeleteExpungeJobParameters> loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
+	}
+
 	@Bean
 	public DeleteExpungeStep expungeStep(
 			HapiTransactionService theHapiTransactionService,
-			IDeleteExpungeSvc theDeleteExpungeSvc,
-			IIdHelperService theIdHelperService) {
+			IDeleteExpungeSvc<?> theDeleteExpungeSvc,
+			IIdHelperService<?> theIdHelperService) {
 		return new DeleteExpungeStep(theHapiTransactionService, theDeleteExpungeSvc, theIdHelperService);
 	}
 
 	@Bean
-	public GenerateRangeChunksStep expungeGenerateRangeChunksStep() {
-		return new GenerateRangeChunksStep();
+	public GenerateRangeChunksStep<DeleteExpungeJobParameters> expungeGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
 	}
 
 	@Bean
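
Note: replacing new LoadIdsStep(theBatch2DaoSvc) with a call to the new loadIdsStep(...) bean method means the step wired into the job definition and the step registered in the Spring context are one managed instance. This relies on standard @Configuration proxying (proxyBeanMethods enabled, which is Spring's default); a minimal sketch of the pattern, with hypothetical bean types:

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class ExampleCtx {

        static class StepBean {}

        static class PipelineBean {
            PipelineBean(StepBean theStep) {}
        }

        @Bean
        public StepBean stepBean() {
            return new StepBean();
        }

        @Bean
        public PipelineBean pipeline() {
            // Routed through the configuration-class proxy, so this returns the
            // container-managed stepBean singleton rather than a fresh instance.
            return new PipelineBean(stepBean());
        }
    }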

View File

@@ -32,7 +32,7 @@ public class BulkImportAppCtx {
 	public static final int PARAM_MAXIMUM_BATCH_SIZE_DEFAULT = 800; // Avoid the 1000 SQL param limit
 
 	@Bean
-	public JobDefinition bulkImport2JobDefinition() {
+	public JobDefinition<BulkImportJobParameters> bulkImport2JobDefinition() {
 		return JobDefinition.newBuilder()
 				.setJobDefinitionId(JOB_BULK_IMPORT_PULL)
 				.setJobDescription("FHIR Bulk Import using pull-based data source")
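
Note: JobDefinition without a type argument is a raw type, which is exactly the class of compile warning this commit removes. For illustration, with a hypothetical generic class:

    class Box<T> {}

    class RawTypeDemo {
        Box raw = new Box<String>();     // warning: [rawtypes] found raw type: Box
        Box<String> typed = new Box<>(); // compiles cleanly
        Box<?> unknown = typed;          // an unbounded wildcard, as used for
                                         // IDeleteExpungeSvc<?> above, is also warning-free
    }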

View File

@@ -20,6 +20,8 @@
 package ca.uhn.fhir.batch2.jobs.reindex;
 
 import ca.uhn.fhir.batch2.api.IJobCoordinator;
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.parameters.UrlListValidator;
@@ -57,14 +59,21 @@ public class ReindexAppCtx {
 						"load-ids",
 						"Load IDs of resources to reindex",
 						ResourceIdListWorkChunkJson.class,
-						new LoadIdsStep(theBatch2DaoSvc))
+						loadIdsStep(theBatch2DaoSvc))
 				.addLastStep("reindex", "Perform the resource reindex", reindexStep())
 				.build();
 	}
 
 	@Bean
-	public GenerateRangeChunksStep reindexGenerateRangeChunksStep() {
-		return new ReindexGenerateRangeChunksStep();
+	public IJobStepWorker<ReindexJobParameters, VoidModel, PartitionedUrlChunkRangeJson>
+			reindexGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
 	}
 
+	@Bean
+	public IJobStepWorker<ReindexJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> loadIdsStep(
+			IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
+	}
+
 	@Bean
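
Note: these beans now expose their steps through the IJobStepWorker interface, whose three type parameters are the job parameters type, the step's input chunk type, and its output chunk type, so each declaration encodes the step's position in the pipeline. Illustrative assignments using the types from this diff (theBatch2DaoSvc stands in for the injected service):

    // First step: consumes nothing (VoidModel), emits URL chunk ranges.
    IJobStepWorker<ReindexJobParameters, VoidModel, PartitionedUrlChunkRangeJson> generate =
            new GenerateRangeChunksStep<>();
    // Next step: consumes chunk ranges, emits resource ID lists.
    IJobStepWorker<ReindexJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> loadIds =
            new LoadIdsStep<>(theBatch2DaoSvc);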

View File

@@ -1,51 +0,0 @@
-/*-
- * #%L
- * hapi-fhir-storage-batch2-jobs
- * %%
- * Copyright (C) 2014 - 2024 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-package ca.uhn.fhir.batch2.jobs.reindex;
-
-import ca.uhn.fhir.batch2.api.IJobDataSink;
-import ca.uhn.fhir.batch2.api.JobExecutionFailedException;
-import ca.uhn.fhir.batch2.api.RunOutcome;
-import ca.uhn.fhir.batch2.api.StepExecutionDetails;
-import ca.uhn.fhir.batch2.api.VoidModel;
-import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
-import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
-import jakarta.annotation.Nonnull;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ReindexGenerateRangeChunksStep extends GenerateRangeChunksStep<ReindexJobParameters> {
-	private static final Logger ourLog = LoggerFactory.getLogger(ReindexGenerateRangeChunksStep.class);
-
-	@Nonnull
-	@Override
-	public RunOutcome run(
-			@Nonnull StepExecutionDetails<ReindexJobParameters, VoidModel> theStepExecutionDetails,
-			@Nonnull IJobDataSink<PartitionedUrlChunkRangeJson> theDataSink)
-			throws JobExecutionFailedException {
-		ReindexJobParameters parameters = theStepExecutionDetails.getParameters();
-		ourLog.info(
-				"Beginning reindex job - OptimizeStorage[{}] - ReindexSearchParameters[{}]",
-				parameters.getOptimizeStorage(),
-				parameters.getReindexSearchParameters());
-		return super.run(theStepExecutionDetails, theDataSink);
-	}
-}

View File

@@ -29,17 +29,10 @@ import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlListJobParameters;
 import ca.uhn.fhir.jpa.api.svc.IBatch2DaoSvc;
 import jakarta.annotation.Nonnull;
-import org.slf4j.Logger;
-
-import static org.slf4j.LoggerFactory.getLogger;
 
-public class LoadIdsStep
-		implements IJobStepWorker<
-				PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> {
-	private static final Logger ourLog = getLogger(LoadIdsStep.class);
-
-	private final ResourceIdListStep<PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson>
-			myResourceIdListStep;
+public class LoadIdsStep<PT extends PartitionedUrlListJobParameters>
+		implements IJobStepWorker<PT, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> {
+	private final ResourceIdListStep<PT, PartitionedUrlChunkRangeJson> myResourceIdListStep;
 
 	public LoadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
 		IIdChunkProducer<PartitionedUrlChunkRangeJson> idChunkProducer =
@@ -50,9 +43,7 @@ public class LoadIdsStep
 	@Nonnull
 	@Override
 	public RunOutcome run(
-			@Nonnull
-			StepExecutionDetails<PartitionedUrlListJobParameters, PartitionedUrlChunkRangeJson>
-					theStepExecutionDetails,
+			@Nonnull StepExecutionDetails<PT, PartitionedUrlChunkRangeJson> theStepExecutionDetails,
 			@Nonnull IJobDataSink<ResourceIdListWorkChunkJson> theDataSink)
 			throws JobExecutionFailedException {
 		return myResourceIdListStep.run(theStepExecutionDetails, theDataSink);
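
Note: the new PT type parameter lets each job bind LoadIdsStep to its own parameters class instead of the shared raw PartitionedUrlListJobParameters signature. A sketch of the intended use, assuming (as the app contexts in this commit suggest) that the per-job parameter classes extend PartitionedUrlListJobParameters:

    LoadIdsStep<DeleteExpungeJobParameters> expungeIds = new LoadIdsStep<>(theBatch2DaoSvc);
    LoadIdsStep<MdmSubmitJobParameters> mdmIds = new LoadIdsStep<>(theBatch2DaoSvc);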

View File

@@ -45,7 +45,6 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
 
 public class ResourceIdListStep<PT extends PartitionedJobParameters, IT extends ChunkRangeJson>
 		implements IJobStepWorker<PT, IT, ResourceIdListWorkChunkJson> {
 	private static final Logger ourLog = Logs.getBatchTroubleshootingLog();
-	public static final int DEFAULT_PAGE_SIZE = 20000;
 
 	protected static final int MAX_BATCH_OF_IDS = 500;

View File

@@ -256,7 +256,7 @@ public class JobDefinition<PT extends IModelJson> {
 	 * Adds a processing step for this job.
 	 *
 	 * @param theStepId A unique identifier for this step. This only needs to be unique within the scope
-	 *                  of the individual job definition (i.e. diuplicates are fine for different jobs, or
+	 *                  of the individual job definition (i.e. duplicates are fine for different jobs, or
 	 *                  even different versions of the same job)
 	 * @param theStepDescription A description of this step
 	 * @param theStepWorker The worker that will actually perform this step

View File

@@ -19,6 +19,8 @@
  */
 package ca.uhn.fhir.mdm.batch2.submit;
 
+import ca.uhn.fhir.batch2.api.IJobStepWorker;
+import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.batch2.jobs.chunk.PartitionedUrlChunkRangeJson;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
 import ca.uhn.fhir.batch2.jobs.step.GenerateRangeChunksStep;
@@ -38,12 +40,12 @@ public class MdmSubmitAppCtx {
 	public static String MDM_SUBMIT_JOB = "MDM_SUBMIT";
 
 	@Bean
-	public GenerateRangeChunksStep submitGenerateRangeChunksStep() {
-		return new GenerateRangeChunksStep();
+	public GenerateRangeChunksStep<MdmSubmitJobParameters> submitGenerateRangeChunksStep() {
+		return new GenerateRangeChunksStep<>();
 	}
 
 	@Bean(name = MDM_SUBMIT_JOB_BEAN_NAME)
-	public JobDefinition mdmSubmitJobDefinition(
+	public JobDefinition<MdmSubmitJobParameters> mdmSubmitJobDefinition(
 			IBatch2DaoSvc theBatch2DaoSvc,
 			MatchUrlService theMatchUrlService,
 			FhirContext theFhirContext,
@@ -61,7 +63,7 @@ public class MdmSubmitAppCtx {
 						PartitionedUrlChunkRangeJson.class,
 						submitGenerateRangeChunksStep())
 				.addIntermediateStep(
-						"load-ids", "Load the IDs", ResourceIdListWorkChunkJson.class, new LoadIdsStep(theBatch2DaoSvc))
+						"load-ids", "Load the IDs", ResourceIdListWorkChunkJson.class, loadIdsStep(theBatch2DaoSvc))
 				.addLastStep(
 						"inflate-and-submit-resources",
 						"Inflate and Submit resources",
@@ -76,7 +78,13 @@ public class MdmSubmitAppCtx {
 	}
 
 	@Bean
-	public MdmInflateAndSubmitResourcesStep mdmInflateAndSubmitResourcesStep() {
+	public LoadIdsStep<MdmSubmitJobParameters> loadIdsStep(IBatch2DaoSvc theBatch2DaoSvc) {
+		return new LoadIdsStep<>(theBatch2DaoSvc);
+	}
+
+	@Bean
+	public IJobStepWorker<MdmSubmitJobParameters, ResourceIdListWorkChunkJson, VoidModel>
+			mdmInflateAndSubmitResourcesStep() {
 		return new MdmInflateAndSubmitResourcesStep();
 	}
 }
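
Note: with these declarations the MDM submit pipeline's generics line up end to end; each step's output chunk type is the next step's input type. A sketch of the type flow using the bean methods above (the assignability of GenerateRangeChunksStep to the first worker type is assumed to follow the same pattern shown in ReindexAppCtx):

    // generate-ranges: first step, VoidModel in, URL chunk ranges out.
    IJobStepWorker<MdmSubmitJobParameters, VoidModel, PartitionedUrlChunkRangeJson> generate =
            submitGenerateRangeChunksStep();
    // load-ids: chunk ranges in, resource ID lists out.
    IJobStepWorker<MdmSubmitJobParameters, PartitionedUrlChunkRangeJson, ResourceIdListWorkChunkJson> ids =
            loadIdsStep(theBatch2DaoSvc);
    // inflate-and-submit-resources: last step, resource ID lists in, nothing out.
    IJobStepWorker<MdmSubmitJobParameters, ResourceIdListWorkChunkJson, VoidModel> submit =
            mdmInflateAndSubmitResourcesStep();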