Removed the scheduler component from heroku-tweetstore because the platform can provide it

Andrew Phillips 2012-05-05 18:54:51 -07:00
parent 103a95f22b
commit 596cf4e044
9 changed files with 3 additions and 590 deletions
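
The removed Quartz scheduler existed only to issue a recurring GET to /stores/do (see the deleted jobs.xml below); on Heroku that recurring request is expected to come from the platform's cron facility instead, which EnqueueStoresController now recognizes via the X-Heroku-Cron header. Below is a minimal sketch of the kind of request the platform is assumed to send; the class name and localhost base URL are hypothetical placeholders, not part of this commit.

import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical helper, not part of this commit: simulates the GET request the
// platform cron is assumed to send, so the new X-Heroku-Cron guard in
// EnqueueStoresController can be exercised locally.
public class SimulateCronRequest {
    public static void main(String[] args) throws Exception {
        // "/stores/do" is the path previously scheduled via the deleted jobs.xml
        URL url = new URL("http://localhost:8080/stores/do");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        connection.setRequestMethod("GET");
        // Header checked by the updated doGet(); requests without it receive a 401
        connection.setRequestProperty("X-Heroku-Cron", "true");
        System.out.println("HTTP " + connection.getResponseCode());
        connection.disconnect();
    }
}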

View File

@@ -25,7 +25,6 @@ import java.util.Map;
import javax.servlet.ServletContext;
- import org.jclouds.demo.paas.service.scheduler.Scheduler;
import org.jclouds.demo.paas.service.taskqueue.TaskQueue;
import org.jclouds.javax.annotation.Nullable;
@@ -36,12 +35,10 @@ import com.google.common.collect.ImmutableMap;
*/
public class PlatformServices {
protected final String baseUrl;
- protected final Scheduler scheduler;
private ImmutableMap<String, TaskQueue> taskQueues;
- public PlatformServices(String baseUrl, Scheduler scheduler, Map<String, TaskQueue> taskQueues) {
+ public PlatformServices(String baseUrl, Map<String, TaskQueue> taskQueues) {
this.baseUrl = baseUrl;
- this.scheduler = scheduler;
this.taskQueues = ImmutableMap.copyOf(taskQueues);
}
@@ -49,10 +46,6 @@ public class PlatformServices {
return baseUrl;
}
- public Scheduler getScheduler() {
- return scheduler;
- }
public @Nullable TaskQueue getTaskQueue(String name) {
return taskQueues.get(name);
}

View File

@@ -28,7 +28,6 @@ import javax.servlet.ServletContextListener;
import org.jclouds.concurrent.config.ExecutorServiceModule;
import org.jclouds.demo.paas.PlatformServices;
- import org.jclouds.demo.paas.service.scheduler.Scheduler;
import org.jclouds.demo.paas.service.taskqueue.TaskQueue;
import org.jclouds.http.HttpCommandExecutorService;
import org.jclouds.http.config.JavaUrlHttpCommandExecutorServiceModule;
@@ -54,8 +53,7 @@ public class PlatformServicesInitializer implements ServletContextListener {
protected static PlatformServices createServices(ServletContext context) {
HttpCommandExecutorService httpClient = createHttpClient(context);
- return new PlatformServices(getBaseUrl(context), new Scheduler(httpClient),
- createTaskQueues(httpClient));
+ return new PlatformServices(getBaseUrl(context), createTaskQueues(httpClient));
}
protected static HttpCommandExecutorService createHttpClient(

View File

@@ -1,69 +0,0 @@
/**
* Licensed to jclouds, Inc. (jclouds) under one or more
* contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. jclouds licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jclouds.demo.paas.service.scheduler;
import static com.google.common.base.Preconditions.checkNotNull;
import java.net.URI;
import javax.servlet.ServletContext;
import org.jclouds.demo.paas.PlatformServices;
import org.jclouds.demo.paas.RunnableHttpRequest;
import org.jclouds.http.HttpRequest;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.SchedulerException;
/**
* @author Andrew Phillips
*/
public class HttpRequestJob implements Job {
protected static final String URL_ATTRIBUTE_NAME = "url";
// keep in sync with "quartz:scheduler-context-servlet-context-key" param in web.xml
protected static final String SERVLET_CONTEXT_KEY = "servlet-context";
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
PlatformServices platform = JobContexts.getPlatform(context);
RunnableHttpRequest request = platform.getScheduler().getHttpRequestFactory().create(
HttpRequest.builder()
.endpoint(JobContexts.getTargetUrl(platform.getBaseUrl(), context))
.method("GET").build());
request.run();
}
private static class JobContexts {
private static URI getTargetUrl(String baseUrl, JobExecutionContext context) {
return URI.create(baseUrl + (String) checkNotNull(
context.getMergedJobDataMap().get(URL_ATTRIBUTE_NAME), URL_ATTRIBUTE_NAME));
}
private static PlatformServices getPlatform(JobExecutionContext jobContext) throws JobExecutionException {
try {
return PlatformServices.get((ServletContext) checkNotNull(
jobContext.getScheduler().getContext().get(SERVLET_CONTEXT_KEY), SERVLET_CONTEXT_KEY));
} catch (SchedulerException exception) {
throw new JobExecutionException("Unable to get platform services from the job execution context", exception);
}
}
}
}

View File

@@ -1,41 +0,0 @@
/**
* Licensed to jclouds, Inc. (jclouds) under one or more
* contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. jclouds licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jclouds.demo.paas.service.scheduler;
import org.jclouds.demo.paas.RunnableHttpRequest;
import org.jclouds.demo.paas.RunnableHttpRequest.Factory;
import org.jclouds.http.HttpCommandExecutorService;
/**
* @author Andrew Phillips
*/
public class Scheduler {
protected static final String SCHEDULER_ORIGINATOR_NAME = "scheduler";
protected final Factory httpRequestFactory;
public Scheduler(HttpCommandExecutorService httpClient) {
httpRequestFactory =
RunnableHttpRequest.factory(httpClient, SCHEDULER_ORIGINATOR_NAME);
}
public Factory getHttpRequestFactory() {
return httpRequestFactory;
}
}

View File

@@ -1,401 +0,0 @@
/**
* Licensed to jclouds, Inc. (jclouds) under one or more
* contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. jclouds licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jclouds.demo.paas.service.scheduler.quartz.plugins;
import static org.quartz.SimpleScheduleBuilder.simpleSchedule;
import static org.quartz.TriggerBuilder.newTrigger;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import org.jclouds.logging.Logger;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SimpleTrigger;
import org.quartz.TriggerKey;
import org.quartz.jobs.FileScanJob;
import org.quartz.jobs.FileScanListener;
import org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin;
import org.quartz.simpl.CascadingClassLoadHelper;
import org.quartz.spi.ClassLoadHelper;
import org.quartz.spi.SchedulerPlugin;
import org.quartz.xml.XMLSchedulingDataProcessor;
/**
* A copy of {@link XMLSchedulingDataProcessorPlugin} that does not reference
* {@code javax.transaction.UserTransaction} and so does not require a dependency
* on JTA.
*
* @author Andrew Phillips
* @see XMLSchedulingDataProcessorPlugin
*/
public class TransactionlessXmlSchedulingDataProcessorPlugin implements
FileScanListener, SchedulerPlugin {
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Data members.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
private static final int MAX_JOB_TRIGGER_NAME_LEN = 80;
private static final String JOB_INITIALIZATION_PLUGIN_NAME = "JobSchedulingDataLoaderPlugin";
private static final String FILE_NAME_DELIMITERS = ",";
private String name;
private Scheduler scheduler;
private final Logger log = Logger.CONSOLE;
private boolean failOnFileNotFound = true;
private String fileNames = XMLSchedulingDataProcessor.QUARTZ_XML_DEFAULT_FILE_NAME;
// Populated by initialization
private Map<String, JobFile> jobFiles = new LinkedHashMap<String, JobFile>();
private long scanInterval = 0;
boolean started = false;
protected ClassLoadHelper classLoadHelper = null;
private Set<String> jobTriggerNameSet = new HashSet<String>();
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Interface.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/**
* Comma separated list of file names (with paths) to the XML files that should be read.
*/
public String getFileNames() {
return fileNames;
}
/**
* The file name (and path) to the XML file that should be read.
*/
public void setFileNames(String fileNames) {
this.fileNames = fileNames;
}
/**
* The interval (in seconds) at which to scan for changes to the file.
* If the file has been changed, it is re-loaded and parsed. The default
* value for the interval is 0, which disables scanning.
*
* @return Returns the scanInterval.
*/
public long getScanInterval() {
return scanInterval / 1000;
}
/**
* The interval (in seconds) at which to scan for changes to the file.
* If the file has been changed, it is re-loaded and parsed. The default
* value for the interval is 0, which disables scanning.
*
* @param scanInterval The scanInterval to set.
*/
public void setScanInterval(long scanInterval) {
this.scanInterval = scanInterval * 1000;
}
/**
* Whether or not initialization of the plugin should fail (throw an
* exception) if the file cannot be found. Default is <code>true</code>.
*/
public boolean isFailOnFileNotFound() {
return failOnFileNotFound;
}
/**
* Whether or not initialization of the plugin should fail (throw an
* exception) if the file cannot be found. Default is <code>true</code>.
*/
public void setFailOnFileNotFound(boolean failOnFileNotFound) {
this.failOnFileNotFound = failOnFileNotFound;
}
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* SchedulerPlugin Interface.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/**
* <p>
* Called during creation of the <code>Scheduler</code> in order to give
* the <code>SchedulerPlugin</code> a chance to initialize.
* </p>
*
* @throws org.quartz.SchedulerConfigException
* if there is an error initializing.
*/
@Override
public void initialize(String name, Scheduler scheduler)
throws SchedulerException {
this.name = name;
this.scheduler = scheduler;
classLoadHelper = new CascadingClassLoadHelper();
classLoadHelper.initialize();
log.info("Registering Quartz Job Initialization Plug-in.");
// Create JobFile objects
StringTokenizer stok = new StringTokenizer(fileNames, FILE_NAME_DELIMITERS);
while (stok.hasMoreTokens()) {
final String fileName = stok.nextToken();
final JobFile jobFile = new JobFile(fileName);
jobFiles.put(fileName, jobFile);
}
}
@Override
public void start() {
try {
if (jobFiles.isEmpty() == false) {
if (scanInterval > 0) {
scheduler.getContext().put(JOB_INITIALIZATION_PLUGIN_NAME + '_' + name, this);
}
Iterator<JobFile> iterator = jobFiles.values().iterator();
while (iterator.hasNext()) {
JobFile jobFile = iterator.next();
if (scanInterval > 0) {
String jobTriggerName = buildJobTriggerName(jobFile.getFileBasename());
TriggerKey tKey = new TriggerKey(jobTriggerName, JOB_INITIALIZATION_PLUGIN_NAME);
// remove pre-existing job/trigger, if any
scheduler.unscheduleJob(tKey);
// TODO: convert to use builder
SimpleTrigger trig = newTrigger()
.withIdentity(jobTriggerName, JOB_INITIALIZATION_PLUGIN_NAME)
.startNow()
.endAt(null)
.withSchedule(simpleSchedule()
.repeatForever()
.withIntervalInMilliseconds(scanInterval))
.build();
JobDetail job = JobBuilder.newJob(FileScanJob.class)
.withIdentity(jobTriggerName, JOB_INITIALIZATION_PLUGIN_NAME)
.build();
job.getJobDataMap().put(FileScanJob.FILE_NAME, jobFile.getFileName());
job.getJobDataMap().put(FileScanJob.FILE_SCAN_LISTENER_NAME, JOB_INITIALIZATION_PLUGIN_NAME + '_' + name);
scheduler.scheduleJob(job, trig);
log.debug("Scheduled file scan job for data file: {}, at interval: {}", jobFile.getFileName(), scanInterval);
}
processFile(jobFile);
}
}
} catch(SchedulerException se) {
log.error("Error starting background-task for watching jobs file.", se);
} finally {
started = true;
}
}
/**
* Helper method for generating unique job/trigger name for the
* file scanning jobs (one per FileJob). The unique names are saved
* in jobTriggerNameSet.
*/
private String buildJobTriggerName(
String fileBasename) {
// Name w/o collisions will be prefix + _ + filename (with '.' of filename replaced with '_')
// For example: JobInitializationPlugin_jobInitializer_myjobs_xml
String jobTriggerName = JOB_INITIALIZATION_PLUGIN_NAME + '_' + name + '_' + fileBasename.replace('.', '_');
// If name is too long (DB column is 80 chars), then truncate to max length
if (jobTriggerName.length() > MAX_JOB_TRIGGER_NAME_LEN) {
jobTriggerName = jobTriggerName.substring(0, MAX_JOB_TRIGGER_NAME_LEN);
}
// Make sure this name is unique in case the same file name under different
// directories is being checked, or had a naming collision due to length truncation.
// If there is a conflict, keep incrementing a _# suffix on the name (being sure
// not to get too long), until we find a unique name.
int currentIndex = 1;
while (jobTriggerNameSet.add(jobTriggerName) == false) {
// If not our first time through, then strip off old numeric suffix
if (currentIndex > 1) {
jobTriggerName = jobTriggerName.substring(0, jobTriggerName.lastIndexOf('_'));
}
String numericSuffix = "_" + currentIndex++;
// If the numeric suffix would make the name too long, then make room for it.
if (jobTriggerName.length() > (MAX_JOB_TRIGGER_NAME_LEN - numericSuffix.length())) {
jobTriggerName = jobTriggerName.substring(0, (MAX_JOB_TRIGGER_NAME_LEN - numericSuffix.length()));
}
jobTriggerName += numericSuffix;
}
return jobTriggerName;
}
@Override
public void shutdown() {
// nothing to do
}
private void processFile(JobFile jobFile) {
if (jobFile == null || !jobFile.getFileFound()) {
return;
}
try {
XMLSchedulingDataProcessor processor =
new XMLSchedulingDataProcessor(this.classLoadHelper);
processor.addJobGroupToNeverDelete(JOB_INITIALIZATION_PLUGIN_NAME);
processor.addTriggerGroupToNeverDelete(JOB_INITIALIZATION_PLUGIN_NAME);
processor.processFileAndScheduleJobs(
jobFile.getFileName(),
jobFile.getFileName(), // systemId
scheduler);
} catch (Exception e) {
log.error("Error scheduling jobs: " + e.getMessage(), e);
}
}
public void processFile(String filePath) {
processFile((JobFile)jobFiles.get(filePath));
}
/**
* @see org.quartz.jobs.FileScanListener#fileUpdated(java.lang.String)
*/
public void fileUpdated(String fileName) {
if (started) {
processFile(fileName);
}
}
class JobFile {
private String fileName;
// These are set by initialize()
private String filePath;
private String fileBasename;
private boolean fileFound;
protected JobFile(String fileName) throws SchedulerException {
this.fileName = fileName;
initialize();
}
protected String getFileName() {
return fileName;
}
protected boolean getFileFound() {
return fileFound;
}
protected String getFilePath() {
return filePath;
}
protected String getFileBasename() {
return fileBasename;
}
private void initialize() throws SchedulerException {
InputStream f = null;
try {
String furl = null;
File file = new File(getFileName()); // files in filesystem
if (!file.exists()) {
URL url = classLoadHelper.getResource(getFileName());
if(url != null) {
try {
furl = URLDecoder.decode(url.getPath(), "UTF-8");
} catch (UnsupportedEncodingException e) {
furl = url.getPath();
}
file = new File(furl);
try {
f = url.openStream();
} catch (IOException ignor) {
// Swallow the exception
}
}
} else {
try {
f = new java.io.FileInputStream(file);
}catch (FileNotFoundException e) {
// ignore
}
}
if (f == null) {
if (isFailOnFileNotFound()) {
throw new SchedulerException(
"File named '" + getFileName() + "' does not exist.");
} else {
log.warn("File named '" + getFileName() + "' does not exist.");
}
} else {
fileFound = true;
}
filePath = (furl != null) ? furl : file.getAbsolutePath();
fileBasename = file.getName();
} finally {
try {
if (f != null) {
f.close();
}
} catch (IOException ioe) {
log.warn("Error closing jobs file " + getFileName(), ioe);
}
}
}
}
}

View File

@@ -19,7 +19,6 @@
package org.jclouds.demo.tweetstore.controller;
import static com.google.common.base.Strings.nullToEmpty;
- import static org.jclouds.demo.paas.RunnableHttpRequest.PLATFORM_REQUEST_ORIGINATOR_HEADER;
import java.io.IOException;
import java.net.URI;
@@ -85,7 +84,7 @@ public class EnqueueStoresController extends HttpServlet {
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
- if (!nullToEmpty(request.getHeader(PLATFORM_REQUEST_ORIGINATOR_HEADER)).equals("scheduler")) {
+ if (!nullToEmpty(request.getHeader("X-Heroku-Cron")).equals("true")) {
response.sendError(401);
}

View File

@@ -1,29 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<job-scheduling-data xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_2_0.xsd"
version="2.0">
<schedule>
<job>
<name>enqueue-store-tweet-tasks</name>
<description>Enqueue 'store tweet' tasks for all contexts</description>
<job-class>org.jclouds.demo.paas.service.scheduler.HttpRequestJob</job-class>
<job-data-map>
<entry>
<key>url</key>
<value>/stores/do</value>
</entry>
</job-data-map>
</job>
<trigger>
<calendar-interval>
<name>submit-recurring-job</name>
<job-name>enqueue-store-tweet-tasks</job-name>
<repeat-interval>10</repeat-interval>
<repeat-interval-unit>MINUTE</repeat-interval-unit>
</calendar-interval>
</trigger>
</schedule>
</job-scheduling-data>

View File

@@ -1,28 +0,0 @@
#============================================================================
# Configure Main Scheduler Properties
#============================================================================
org.quartz.scheduler.skipUpdateCheck: true
#============================================================================
# Configure ThreadPool
#============================================================================
org.quartz.threadPool.class: org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount: 1
#============================================================================
# Configure JobStore
#============================================================================
org.quartz.jobStore.class: org.quartz.simpl.RAMJobStore
#============================================================================
# Configure the Job Initialization Plugin
#============================================================================
org.quartz.plugin.jobInitializer.class: org.jclouds.demo.paas.service.scheduler.quartz.plugins.TransactionlessXmlSchedulingDataProcessorPlugin
org.quartz.plugin.jobInitializer.fileNames: jobs.xml
org.quartz.plugin.jobInitializer.failOnFileNotFound: true
org.quartz.plugin.jobInitializer.scanInterval: 0
#org.quartz.plugin.jobInitializer.wrapInUserTransaction: false

View File

@@ -24,11 +24,6 @@
version="2.5">
<display-name>jclouds-tweetstore</display-name>
- <context-param>
- <param-name>quartz:scheduler-context-servlet-context-key</param-name>
- <param-value>servlet-context</param-value>
- </context-param>
<!-- Servlets -->
<filter>
<filter-name>guiceFilter</filter-name>
@@ -45,10 +40,6 @@
<listener-class>org.jclouds.demo.paas.config.PlatformServicesInitializer</listener-class>
</listener>
- <listener>
- <listener-class>org.quartz.ee.servlet.QuartzInitializerListener</listener-class>
- </listener>
<listener>
<listener-class>org.jclouds.demo.tweetstore.config.GuiceServletConfig</listener-class>
</listener>