replace FireAtIntervalJob class with an interface (HapiJob) for now

This commit is contained in:
Ken Stevens 2019-11-29 09:21:20 -05:00
parent f450cea9a5
commit a0e363171d
10 changed files with 55 additions and 187 deletions

View File

@ -32,7 +32,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionEntity;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
@ -53,9 +53,7 @@ import org.hl7.fhir.instance.model.api.IBaseBinary;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -469,18 +467,12 @@ public class BulkDataExportSvcImpl implements IBulkDataExportSvc {
});
}
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public static class SubmitJob extends FireAtIntervalJob {
public static class SubmitJob implements HapiJob {
@Autowired
private IBulkDataExportSvc myTarget;
public SubmitJob() {
super(REFRESH_INTERVAL);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTarget.buildExportFiles();
}
}

View File

@ -26,16 +26,14 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.server.sched.ISchedulerService;
import ca.uhn.fhir.rest.server.sched.ScheduledJobDefinition;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.lang3.time.DateUtils;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -132,18 +130,12 @@ public class CacheWarmingSvcImpl implements ICacheWarmingSvc {
}
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public static class SubmitJob extends FireAtIntervalJob {
public static class SubmitJob implements HapiJob {
@Autowired
private ICacheWarmingSvc myTarget;
public SubmitJob() {
super(SCHEDULED_JOB_INTERVAL);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTarget.performWarmingPass();
}
}

View File

@ -26,7 +26,7 @@ import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.jpa.dao.DaoRegistry;
import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
import ca.uhn.fhir.jpa.search.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
@ -55,9 +55,7 @@ import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -389,18 +387,12 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
}
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public static class SubmitJob extends FireAtIntervalJob {
public static class SubmitJob implements HapiJob {
@Autowired
private ISubscriptionTriggeringSvc myTarget;
public SubmitJob() {
super(SCHEDULE_DELAY);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTarget.runDeliveryPass();
}
}

View File

@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermVersionAdapterSvc;
@ -289,20 +289,12 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
myConceptDao = theConceptDao;
}
public static class SaveDeferredJob extends FireAtIntervalJob {
public static class SaveDeferredJob implements HapiJob {
@Autowired
private ITermDeferredStorageSvc myTerminologySvc;
/**
* Constructor
*/
public SaveDeferredJob() {
super(SCHEDULE_INTERVAL_MILLIS);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTerminologySvc.saveDeferred();
}
}

View File

@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
@ -159,20 +159,12 @@ public class TermReindexingSvcImpl implements ITermReindexingSvc {
mySchedulerService.scheduleFixedDelayLocal(SCHEDULE_INTERVAL_MILLIS, jobDefinition);
}
public static class SaveDeferredJob extends FireAtIntervalJob {
public static class SaveDeferredJob implements HapiJob {
@Autowired
private ITermDeferredStorageSvc myTerminologySvc;
/**
* Constructor
*/
public SaveDeferredJob() {
super(SCHEDULE_INTERVAL_MILLIS);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTerminologySvc.saveDeferred();
}
}

View File

@ -1,13 +1,16 @@
package ca.uhn.fhir.jpa.sched;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.rest.server.sched.ISchedulerService;
import ca.uhn.fhir.rest.server.sched.ScheduledJobDefinition;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.quartz.*;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
@ -157,10 +160,7 @@ public class SchedulerServiceImplTest {
}
}
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public static class CountingIntervalJob extends FireAtIntervalJob {
public static class CountingIntervalJob implements HapiJob {
private static int ourCount;
@ -169,12 +169,8 @@ public class SchedulerServiceImplTest {
private String myStringBean;
private ApplicationContext myAppCtx;
public CountingIntervalJob() {
super(500);
}
@Override
public void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
ourLog.info("Job has fired, going to sleep for {}ms", ourTaskDelay);
sleepAtLeast(ourTaskDelay);
ourCount++;

View File

@ -1,70 +0,0 @@
package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR Model
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public abstract class FireAtIntervalJob implements Job {
public static final String NEXT_EXECUTION_TIME = "NEXT_EXECUTION_TIME";
private static final Logger ourLog = LoggerFactory.getLogger(FireAtIntervalJob.class);
private final long myMillisBetweenExecutions;
public FireAtIntervalJob(long theMillisBetweenExecutions) {
myMillisBetweenExecutions = theMillisBetweenExecutions;
}
@Override
public final void execute(JobExecutionContext theContext) {
/** FIXME KHS
Long nextExecution = (Long) theContext.getJobDetail().getJobDataMap().get(NEXT_EXECUTION_TIME);
if (nextExecution != null) {
long cutoff = System.currentTimeMillis();
if (nextExecution >= cutoff) {
ourLog.info("Not enough time has passed since last execution of {}. Skipping this one.", theContext.getJobDetail().getDescription());
return;
}
}
*/
try {
doExecute(theContext);
} catch (Throwable t) {
ourLog.error("Job threw uncaught exception", t);
}
/** FIXME KHS
finally {
long newNextExecution = System.currentTimeMillis() + myMillisBetweenExecutions;
theContext.getJobDetail().getJobDataMap().put(NEXT_EXECUTION_TIME, newNextExecution);
}
*/
}
protected abstract void doExecute(JobExecutionContext theContext);
}

View File

@ -0,0 +1,30 @@
package ca.uhn.fhir.jpa.model.sched;
/*-
* #%L
* HAPI FHIR Model
* %%
* Copyright (C) 2014 - 2019 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.quartz.DisallowConcurrentExecution;
import org.quartz.Job;
import org.quartz.PersistJobDataAfterExecution;
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public interface HapiJob extends Job {
}

View File

@ -1,40 +0,0 @@
package ca.uhn.fhir.jpa.model.sched;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.quartz.JobExecutionContext;
import static ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob.NEXT_EXECUTION_TIME;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@RunWith(MockitoJUnitRunner.class)
public class FireAtIntervalJobTest {
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
private JobExecutionContext myJobExecutionContext;
@Test
public void testExecutionThrowsException() {
FireAtIntervalJob job = new FireAtIntervalJob(1000) {
@Override
protected void doExecute(JobExecutionContext theContext) {
throw new NullPointerException();
}
};
// No exception thrown please
job.execute(myJobExecutionContext);
verify(myJobExecutionContext.getJobDetail().getJobDataMap(), times(1)).put(eq(NEXT_EXECUTION_TIME), anyLong());
}
}

View File

@ -21,7 +21,7 @@ package ca.uhn.fhir.jpa.subscription.module.cache;
*/
import ca.uhn.fhir.jpa.api.IDaoRegistry;
import ca.uhn.fhir.jpa.model.sched.FireAtIntervalJob;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.retry.Retrier;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
@ -33,9 +33,7 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Subscription;
import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobExecutionContext;
import org.quartz.PersistJobDataAfterExecution;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -160,18 +158,12 @@ public class SubscriptionLoader {
mySubscriptionProvider = theSubscriptionProvider;
}
@DisallowConcurrentExecution
@PersistJobDataAfterExecution
public static class SubmitJob extends FireAtIntervalJob {
public static class SubmitJob implements HapiJob {
@Autowired
private SubscriptionLoader myTarget;
public SubmitJob() {
super(REFRESH_INTERVAL);
}
@Override
protected void doExecute(JobExecutionContext theContext) {
public void execute(JobExecutionContext theContext) {
myTarget.syncSubscriptions();
}
}