allow users to set default group

Ken Stevens 2019-12-04 17:23:51 -05:00
parent b171ff97ea
commit 2f89f10c64
7 changed files with 86 additions and 9 deletions

View File

@@ -83,6 +83,7 @@ public abstract class BaseConfig {
	public static final String TASK_EXECUTOR_NAME = "hapiJpaTaskExecutor";
	public static final String GRAPHQL_PROVIDER_NAME = "myGraphQLProvider";
+	private static final String HAPI_DEFAULT_SCHEDULER_GROUP = "HAPI";

	@Autowired
	protected Environment myEnv;
@@ -249,7 +250,7 @@ public abstract class BaseConfig {
	@Bean
	public ISchedulerService schedulerService() {
-		return new HapiSchedulerServiceImpl();
+		return new HapiSchedulerServiceImpl().setDefaultGroup(HAPI_DEFAULT_SCHEDULER_GROUP);
	}

	@Bean

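With this wiring in place, every job the stock JPA configuration schedules lands in the "HAPI" group unless the job definition names a group of its own. A minimal sketch of how a downstream Spring configuration might supply a different default (the class name, bean, and "MY-APP" value are hypothetical; setDefaultGroup is the builder-style setter added to BaseSchedulerServiceImpl below, and the HapiSchedulerServiceImpl import path is assumed):

import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.sched.HapiSchedulerServiceImpl; // package assumed
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MyAppSchedulerConfig {

	// Hypothetical bean: jobs scheduled without an explicit group fall back
	// to "MY-APP" here instead of the built-in "HAPI" default.
	@Bean
	public ISchedulerService schedulerService() {
		return new HapiSchedulerServiceImpl().setDefaultGroup("MY-APP");
	}
}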
View File

@@ -4,6 +4,7 @@ import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.model.sched.IHapiScheduler;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.Validate;
import org.jetbrains.annotations.NotNull;
@@ -129,12 +130,13 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
		Validate.notNull(theJobDefinition.getJobClass());
		Validate.notBlank(theJobDefinition.getId());

-		JobKey jobKey = new JobKey(theJobDefinition.getId());
+		JobKey jobKey;
+		jobKey = new JobKey(theJobDefinition.getId(), theJobDefinition.getGroup());

		JobDetailImpl jobDetail = new NonConcurrentJobDetailImpl();
		jobDetail.setJobClass(theJobDefinition.getJobClass());
		jobDetail.setKey(jobKey);
+		jobDetail.setName(theJobDefinition.getId());
		jobDetail.setJobDataMap(new JobDataMap(theJobDefinition.getJobData()));

		ScheduleBuilder<? extends Trigger> schedule = SimpleScheduleBuilder
@@ -158,6 +160,12 @@ public abstract class BaseHapiScheduler implements IHapiScheduler {
	}

+	@VisibleForTesting
+	@Override
+	public Set<JobKey> getJobKeysForUnitTest() throws SchedulerException {
+		return myScheduler.getJobKeys(GroupMatcher.anyGroup());
+	}

	private static class NonConcurrentJobDetailImpl extends JobDetailImpl {
		private static final long serialVersionUID = 5716197221121989740L;

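For context on the JobKey change above: a Quartz JobKey is a (name, group) pair, so two jobs with the same id but different groups are distinct entries in the scheduler, and GroupMatcher.anyGroup() matches keys from every group, which is what the new getJobKeysForUnitTest() relies on. A small standalone sketch using only standard Quartz API (the class name and key values are illustrative):

import org.quartz.JobKey;
import org.quartz.impl.matchers.GroupMatcher;

public class JobKeyGroupSketch {
	public static void main(String[] args) {
		// Same job id, different groups: Quartz treats these as two distinct keys.
		JobKey hapiKey = new JobKey("reindex-task", "HAPI");
		JobKey otherKey = new JobKey("reindex-task", "OTHER");
		System.out.println(hapiKey.equals(otherKey)); // false

		// anyGroup() matches a key no matter which group it was scheduled under.
		GroupMatcher<JobKey> everyGroup = GroupMatcher.anyGroup();
		System.out.println(everyGroup.isMatch(hapiKey) && everyGroup.isMatch(otherKey)); // true
	}
}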
View File

@@ -26,6 +26,8 @@ import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.model.sched.ISmartLifecyclePhase;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
import ca.uhn.fhir.util.StopWatch;
+import com.google.common.annotations.VisibleForTesting;
+import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,6 +37,7 @@ import org.springframework.context.SmartLifecycle;
import org.springframework.core.env.Environment;

import javax.annotation.PostConstruct;
+import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

/**
@@ -65,6 +68,8 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService, SmartLifecycle {
	private boolean myClusteredSchedulingEnabled;
	private AtomicBoolean myStopping = new AtomicBoolean(false);
+	private String myDefaultGroup;

	@Autowired
	private Environment myEnvironment;
	@Autowired
@@ -77,6 +82,11 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService, SmartLifecycle {
		setClusteredSchedulingEnabled(true);
	}

+	public BaseSchedulerServiceImpl setDefaultGroup(String theDefaultGroup) {
+		myDefaultGroup = theDefaultGroup;
+		return this;
+	}

	public boolean isLocalSchedulingEnabled() {
		return myLocalSchedulingEnabled;
	}
@@ -174,14 +184,32 @@ public abstract class BaseSchedulerServiceImpl implements ISchedulerService, SmartLifecycle {
	@Override
	public void scheduleLocalJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
-		ourLog.info("Scheduling local job {} with interval {}", theJobDefinition.getId(), StopWatch.formatMillis(theIntervalMillis));
-		myLocalScheduler.scheduleJob(theIntervalMillis, theJobDefinition);
+		scheduleJob("local", myLocalScheduler, theIntervalMillis, theJobDefinition);
	}

	@Override
	public void scheduleClusteredJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
-		ourLog.info("Scheduling clustered job {} with interval {}", theJobDefinition.getId(), StopWatch.formatMillis(theIntervalMillis));
-		myClusteredScheduler.scheduleJob(theIntervalMillis, theJobDefinition);
+		scheduleJob("clustered", myClusteredScheduler, theIntervalMillis, theJobDefinition);
+	}
+
+	private void scheduleJob(String theInstanceName, IHapiScheduler theScheduler, long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
+		ourLog.info("Scheduling {} job {} with interval {}", theInstanceName, theJobDefinition.getId(), StopWatch.formatMillis(theIntervalMillis));
+		if (theJobDefinition.getGroup() == null) {
+			theJobDefinition.setGroup(myDefaultGroup);
+		}
+		theScheduler.scheduleJob(theIntervalMillis, theJobDefinition);
+	}
+
+	@VisibleForTesting
+	@Override
+	public Set<JobKey> getLocalJobKeysForUnitTest() throws SchedulerException {
+		return myLocalScheduler.getJobKeysForUnitTest();
+	}
+
+	@VisibleForTesting
+	@Override
+	public Set<JobKey> getClusteredJobKeysForUnitTest() throws SchedulerException {
+		return myClusteredScheduler.getJobKeysForUnitTest();
	}

	private boolean isSchedulingDisabledForUnitTests() {

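The new private scheduleJob() helper is where the default group takes effect: a definition whose group is still null is stamped with the service-wide default before it reaches the underlying scheduler. A small self-contained sketch of that decision, assuming only the getGroup()/setGroup() accessors added in this commit (the "HAPI" literal stands in for whatever group the service was configured with):

import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;

public class DefaultGroupSketch {
	public static void main(String[] args) {
		// A definition that never sets a group reports null...
		ScheduledJobDefinition jobDef = new ScheduledJobDefinition();
		System.out.println(jobDef.getGroup()); // null

		// ...which is the condition scheduleJob() checks before substituting
		// the service-wide default group.
		if (jobDef.getGroup() == null) {
			jobDef.setGroup("HAPI");
		}
		System.out.println(jobDef.getGroup()); // HAPI
	}
}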
View File

@@ -2,10 +2,13 @@ package ca.uhn.fhir.jpa.sched;
import ca.uhn.fhir.jpa.model.sched.IHapiScheduler;
import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;
+import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

+import java.util.Set;

public class HapiNullScheduler implements IHapiScheduler {
	private static final Logger ourLog = LoggerFactory.getLogger(HapiNullScheduler.class);
@@ -43,4 +46,9 @@ public class HapiNullScheduler implements IHapiScheduler {
	public void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition) {
		ourLog.debug("Skipping scheduling job {} since scheduling is disabled", theJobDefinition.getId());
	}

+	@Override
+	public Set<JobKey> getJobKeysForUnitTest() {
+		return null;
+	}
}

View File

@@ -1,7 +1,10 @@
package ca.uhn.fhir.jpa.model.sched;

+import org.quartz.JobKey;
import org.quartz.SchedulerException;

+import java.util.Set;

public interface IHapiScheduler {

	void init() throws SchedulerException;
@@ -16,4 +19,6 @@ public interface IHapiScheduler {
	void logStatusForUnitTest();

	void scheduleJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition);
+
+	Set<JobKey> getJobKeysForUnitTest() throws SchedulerException;
}

View File

@@ -21,8 +21,11 @@ package ca.uhn.fhir.jpa.model.sched;
 */

import com.google.common.annotations.VisibleForTesting;
+import org.quartz.JobKey;
import org.quartz.SchedulerException;

+import java.util.Set;

public interface ISchedulerService {

	@VisibleForTesting
@@ -44,5 +47,11 @@ public interface ISchedulerService {
	 */
	void scheduleClusteredJob(long theIntervalMillis, ScheduledJobDefinition theJobDefinition);

+	@VisibleForTesting
+	Set<JobKey> getLocalJobKeysForUnitTest() throws SchedulerException;
+
+	@VisibleForTesting
+	Set<JobKey> getClusteredJobKeysForUnitTest() throws SchedulerException;
+
	boolean isStopping();
}

View File

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.sched;
 */

import org.apache.commons.lang3.Validate;
+import org.apache.commons.lang3.builder.ToStringBuilder;
import org.quartz.Job;

import java.util.Collections;
@@ -28,10 +29,9 @@ import java.util.HashMap;
import java.util.Map;

public class ScheduledJobDefinition {

	private Class<? extends Job> myJobClass;
	private String myId;
+	private String myGroup;
	private Map<String, String> myJobData;

	public Map<String, String> getJobData() {
@@ -60,6 +60,15 @@ public class ScheduledJobDefinition {
		return this;
	}

+	public String getGroup() {
+		return myGroup;
+	}
+
+	public ScheduledJobDefinition setGroup(String theGroup) {
+		myGroup = theGroup;
+		return this;
+	}
+
	public void addJobData(String thePropertyName, String thePropertyValue) {
		Validate.notBlank(thePropertyName);
		if (myJobData == null) {
@@ -68,4 +77,13 @@ public class ScheduledJobDefinition {
		Validate.isTrue(myJobData.containsKey(thePropertyName) == false);
		myJobData.put(thePropertyName, thePropertyValue);
	}

+	@Override
+	public String toString() {
+		return new ToStringBuilder(this)
+			.append("myJobClass", myJobClass)
+			.append("myId", myId)
+			.append("myGroup", myGroup)
+			.toString();
+	}
}
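
Taken together, a ScheduledJobDefinition now carries an optional group alongside its id and job class, and the new toString() makes all three visible in logs. A short usage sketch (the id, group, and job-data values are made up):

import ca.uhn.fhir.jpa.model.sched.ScheduledJobDefinition;

public class ScheduledJobDefinitionSketch {
	public static void main(String[] args) {
		ScheduledJobDefinition def = new ScheduledJobDefinition();
		def.setId("expire-stale-searches"); // hypothetical job id
		def.setGroup("MY-GROUP");           // optional; left null it falls back to the service default
		def.addJobData("someKey", "someValue");

		// The new toString() prints the job class, id, and group.
		System.out.println(def);
	}
}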