cleaning up

leif stawnyczy 2024-09-26 16:08:09 -04:00
parent bb5d91ac37
commit bddcea8db6
6 changed files with 16 additions and 19 deletions


@@ -453,6 +453,9 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc, IHas
 		List<String> idsToDelete = new ArrayList<>();
 		for (String id : executions) {
 			// TODO - might want to consider a "fetch all instances"
+			// JobInstanceFetchRequest fetchRequest = new JobInstanceFetchRequest();
+			// myJobCoordinator.fetchAllJobInstances()
 			JobInstance instance = myJobCoordinator.getInstance(id);
 			if (StatusEnum.getEndedStatuses().contains(instance.getStatus())) {
 				idsToDelete.add(instance.getInstanceId());

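The TODO suggests replacing the per-id getInstance() lookups with a single paged fetch. A rough sketch of that alternative, based only on the types named in the commented-out code; the JobInstanceFetchRequest setters and the Page return type are assumptions, not verified API:

	// Hypothetical "fetch all instances" variant of the loop above.
	// Setter names and the Page<JobInstance> return type are assumed.
	JobInstanceFetchRequest fetchRequest = new JobInstanceFetchRequest();
	fetchRequest.setPageStart(0);
	fetchRequest.setBatchSize(executions.size());

	Page<JobInstance> instances = myJobCoordinator.fetchAllJobInstances(fetchRequest);
	for (JobInstance instance : instances) {
		// keep the same terminal-status filter as the current per-id code path
		if (StatusEnum.getEndedStatuses().contains(instance.getStatus())) {
			idsToDelete.add(instance.getInstanceId());
		}
	}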

@@ -1,8 +1,8 @@
 package ca.uhn.fhir.jpa.reindex;

 import ca.uhn.fhir.batch2.api.IJobCoordinator;
-import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlJobParameters;
 import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrl;
+import ca.uhn.fhir.batch2.jobs.parameters.PartitionedUrlJobParameters;
 import ca.uhn.fhir.batch2.jobs.reindex.ReindexAppCtx;
 import ca.uhn.fhir.batch2.model.JobInstance;
 import ca.uhn.fhir.batch2.model.JobInstanceStartRequest;


@@ -18,11 +18,11 @@ public class BaseReindexStep {
 	protected final HapiTransactionService myHapiTransactionService;
-	protected IFhirSystemDao<?, ?> mySystemDao;
-	protected DaoRegistry myDaoRegistry;
-	protected IIdHelperService<IResourcePersistentId<?>> myIdHelperService;
+	protected final IFhirSystemDao<?, ?> mySystemDao;
+	protected final DaoRegistry myDaoRegistry;
+	protected final IIdHelperService<IResourcePersistentId<?>> myIdHelperService;

 	public BaseReindexStep(
 			HapiTransactionService theHapiTransactionService,

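Marking the collaborators final means each one must be assigned exactly once, in the constructor. A minimal sketch of the corresponding constructor body; parameter names after the first are assumed from the usual HAPI "the*" convention:

	public BaseReindexStep(
			HapiTransactionService theHapiTransactionService,
			IFhirSystemDao<?, ?> theSystemDao,
			DaoRegistry theDaoRegistry,
			IIdHelperService<IResourcePersistentId<?>> theIdHelperService) {
		// with the fields now final, the compiler enforces that every
		// collaborator is supplied up front and never reassigned
		myHapiTransactionService = theHapiTransactionService;
		mySystemDao = theSystemDao;
		myDaoRegistry = theDaoRegistry;
		myIdHelperService = theIdHelperService;
	}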

@@ -9,18 +9,15 @@ import ca.uhn.fhir.batch2.api.StepExecutionDetails;
 import ca.uhn.fhir.batch2.api.VoidModel;
 import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults;
 import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService;
-import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
-import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
-import ca.uhn.fhir.jpa.api.model.ReindexJobStatus;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import jakarta.annotation.Nonnull;
-import org.springframework.beans.factory.annotation.Autowired;

 public class CheckPendingReindexWorkStep implements IJobStepWorker<ReindexJobParameters, ReindexResults, VoidModel> {

-	@Autowired
-	private ReindexJobService myReindexJobService;
+	private final ReindexJobService myReindexJobService;
+
+	public CheckPendingReindexWorkStep(ReindexJobService theReindexJobService) {
+		myReindexJobService = theReindexJobService;
+	}

 	@Nonnull
 	@Override

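Pieced together, the hunk above swaps Spring field injection for plain constructor injection: the @Autowired field and its now-unused imports disappear, and the service becomes a required, final constructor argument. The class skeleton after this change reads roughly as follows (the run(...) implementation is unchanged and omitted):

	public class CheckPendingReindexWorkStep implements IJobStepWorker<ReindexJobParameters, ReindexResults, VoidModel> {

		private final ReindexJobService myReindexJobService;

		// constructor injection: the step can now be built in plain Java
		// (e.g. in tests) without a Spring context performing field injection
		public CheckPendingReindexWorkStep(ReindexJobService theReindexJobService) {
			myReindexJobService = theReindexJobService;
		}

		// @Nonnull @Override run(...) unchanged, not shown here
	}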

@@ -142,7 +142,7 @@ public class ReindexAppCtx {
 	@Bean
 	public CheckPendingReindexWorkStep pendingWorkStep() {
-		return new CheckPendingReindexWorkStep();
+		return new CheckPendingReindexWorkStep(jobService());
 	}

 	@Bean

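With the constructor now requiring a ReindexJobService, the @Configuration class supplies it by calling its own jobService() bean method. Because ReindexAppCtx is a proxied @Configuration class, that call returns the already-registered singleton rather than constructing a second service. A generic sketch of the pattern; the jobService() definition shown here is an assumption, since it is not part of this diff:

	@Configuration
	public class ExampleReindexCtx {

		@Bean
		public ReindexJobService jobService() {
			// assumed bean definition; the real one lives elsewhere in ReindexAppCtx
			return new ReindexJobService();
		}

		@Bean
		public CheckPendingReindexWorkStep pendingWorkStep() {
			// inter-bean call: Spring returns the singleton jobService bean above
			return new CheckPendingReindexWorkStep(jobService());
		}
	}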

@@ -7,7 +7,6 @@ import ca.uhn.fhir.batch2.api.RetryChunkLaterException;
 import ca.uhn.fhir.batch2.api.RunOutcome;
 import ca.uhn.fhir.batch2.api.StepExecutionDetails;
 import ca.uhn.fhir.batch2.jobs.chunk.ResourceIdListWorkChunkJson;
-import ca.uhn.fhir.batch2.jobs.chunk.TypedPidJson;
 import ca.uhn.fhir.batch2.jobs.reindex.models.ReindexResults;
 import ca.uhn.fhir.batch2.jobs.reindex.svcs.ReindexJobService;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
@@ -19,12 +18,10 @@ import jakarta.annotation.Nonnull;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.stream.Collectors;

 public class ReindexStepV2 extends BaseReindexStep implements IJobStepWorker<ReindexJobParameters, ResourceIdListWorkChunkJson, ReindexResults> {

-	private ReindexJobService myReindexJobService;
+	private final ReindexJobService myReindexJobService;

 	public ReindexStepV2(ReindexJobService theJobService,
 			HapiTransactionService theHapiTransactionService, IFhirSystemDao<?, ?> theSystemDao, DaoRegistry theRegistry, IIdHelperService<IResourcePersistentId<?>> theIdHelperService) {
@@ -41,10 +38,10 @@ public class ReindexStepV2 extends BaseReindexStep implements IJobStepWorker<Rei
 		// This is not strictly necessary;
 		// but we'll ensure that no outstanding "reindex work"
 		// is waiting to be completed, so that when we do
-		// our reindex, it won't skip over data
+		// our reindex work here, it won't skip over that data
 		Map<String, Boolean> resourceTypesToCheckFlag = new HashMap<>();
 		data.getTypedPids().forEach(id -> {
-			// we don't really care about duplicates
+			// we don't really care about duplicates; we check by resource type
 			resourceTypesToCheckFlag.put(id.getResourceType(), true);
 		});
 		if (myReindexJobService.anyResourceHasPendingReindexWork(resourceTypesToCheckFlag)) {