mirror of https://github.com/apache/archiva.git

database and repository tasks with a single task queue for processing

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches/archiva-jpox-database-refactor@527585 13f79535-47bb-0310-9956-ffa450edef68

parent 79a7f98efa
commit d225667efd
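This commit replaces the old data-refresh-only RepositoryTaskScheduler with a single ArchivaTaskScheduler component that feeds both database and repository work through one shared task queue. Below is a minimal usage sketch, not part of the commit: it assumes an initialized Plexus container and that the renamed interface sits in org.apache.maven.archiva.scheduled next to the other new classes (the interface's package line is not visible in this diff). The ROLE constant and the method names come from the interface as changed below; everything else is illustrative.

import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;   // package assumed, see note above
import org.codehaus.plexus.PlexusContainer;

public class SchedulerUsageSketch
{
    public void triggerScans( PlexusContainer container )
        throws Exception
    {
        // Look the scheduler up by its Plexus role; ROLE is declared on the interface itself.
        ArchivaTaskScheduler scheduler =
            (ArchivaTaskScheduler) container.lookup( ArchivaTaskScheduler.ROLE );

        // Both calls hand work to the shared archiva-task-queue (in this commit they do so by
        // scheduling the corresponding Quartz jobs) and throw TaskExecutionException on failure.
        scheduler.runDatabaseTasks();
        scheduler.runRepositoryTasks( "internal" );   // "internal" is a hypothetical repository id
    }
}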
@@ -33,6 +33,10 @@
       <groupId>org.apache.maven.archiva</groupId>
       <artifactId>archiva-configuration</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.maven.archiva</groupId>
+      <artifactId>archiva-database</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.codehaus.plexus</groupId>
       <artifactId>plexus-taskqueue</artifactId>
@@ -61,6 +65,20 @@
       <plugin>
         <groupId>org.codehaus.plexus</groupId>
         <artifactId>plexus-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>merge</id>
+            <goals>
+              <goal>merge-descriptors</goal>
+            </goals>
+            <configuration>
+              <descriptors>
+                <descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
+                <descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
       </plugin>
     </plugins>
   </build>
@@ -26,14 +26,17 @@ import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
  *
  * @author <a href="mailto:brett@apache.org">Brett Porter</a>
  */
-public interface RepositoryTaskScheduler
+public interface ArchivaTaskScheduler
 {
     /**
      * The Plexus component role.
      */
-    String ROLE = RepositoryTaskScheduler.class.getName();
+    public final static String ROLE = ArchivaTaskScheduler.class.getName();
 
-    void runDataRefresh()
-        throws TaskExecutionException;
+    public void runDatabaseTasks() throws TaskExecutionException;
+
+    public void runAllRepositoryTasks() throws TaskExecutionException;
+
+    public void runRepositoryTasks( String repositoryId ) throws TaskExecutionException;
 
 }
@@ -0,0 +1,85 @@
+package org.apache.maven.archiva.scheduled;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
+import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
+import org.codehaus.plexus.scheduler.AbstractJob;
+import org.codehaus.plexus.taskqueue.TaskQueue;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
+import org.quartz.JobDataMap;
+import org.quartz.JobExecutionContext;
+import org.quartz.JobExecutionException;
+
+/**
+ * This class is the database job that is executed by the scheduler.
+ */
+public class DatabaseTaskJob
+    extends AbstractJob
+{
+    static final String TASK_KEY = "EXECUTION";
+
+    static final String TASK_QUEUE = "TASK_QUEUE";
+
+    static final String TASK_QUEUE_POLICY = "TASK_QUEUE_POLICY";
+
+    /**
+     * Execute the discoverer and the indexer.
+     *
+     * @param context
+     * @throws org.quartz.JobExecutionException
+     *
+     */
+    public void execute( JobExecutionContext context )
+        throws JobExecutionException
+    {
+        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
+        setJobDataMap( dataMap );
+
+        TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
+        String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
+
+        ArchivaTask task = new DatabaseTask();
+        task.setName( context.getJobDetail().getName() );
+
+        try
+        {
+            if ( taskQueue.getQueueSnapshot().size() == 0 )
+            {
+                taskQueue.put( task );
+            }
+            else
+            {
+                if ( ArchivaTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
+                {
+                    taskQueue.put( task );
+                }
+                else if ( ArchivaTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
+                {
+                    // do not queue anymore, policy is to skip
+                }
+            }
+        }
+        catch ( TaskQueueException e )
+        {
+            throw new JobExecutionException( e );
+        }
+    }
+}
@@ -0,0 +1,287 @@
+package org.apache.maven.archiva.scheduled;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.apache.maven.archiva.configuration.ArchivaConfiguration;
+import org.apache.maven.archiva.configuration.Configuration;
+import org.apache.maven.archiva.configuration.RepositoryConfiguration;
+import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.apache.maven.archiva.database.ObjectNotFoundException;
+import org.apache.maven.archiva.database.RepositoryDAO;
+import org.apache.maven.archiva.model.ArchivaRepository;
+import org.codehaus.plexus.logging.AbstractLogEnabled;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
+import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
+import org.codehaus.plexus.registry.Registry;
+import org.codehaus.plexus.registry.RegistryListener;
+import org.codehaus.plexus.scheduler.Scheduler;
+import org.codehaus.plexus.taskqueue.TaskQueue;
+import org.codehaus.plexus.taskqueue.TaskQueueException;
+import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
+import org.quartz.CronTrigger;
+import org.quartz.JobDataMap;
+import org.quartz.JobDetail;
+import org.quartz.SchedulerException;
+
+import java.text.ParseException;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Default implementation of a scheduling component for the application.
+ *
+ * @author <a href="mailto:brett@apache.org">Brett Porter</a>
+ * @plexus.component role="org.apache.maven.archiva.scheduler.ArchivaTaskScheduler"
+ */
+public class DefaultArchivaTaskScheduler
+    extends AbstractLogEnabled
+    implements ArchivaTaskScheduler, Startable, RegistryListener
+{
+    /**
+     * @plexus.requirement
+     */
+    private Scheduler scheduler;
+
+    /**
+     * @plexus.requirement role-hint="archiva-task-queue"
+     */
+    private TaskQueue archivaTaskQueue;
+
+    /**
+     * @plexus.requirement
+     */
+    private ArchivaConfiguration archivaConfiguration;
+
+    public static final String DATABASE_DISCOVERER_GROUP = "database-group";
+
+    public static final String DATABASE_JOB = "database-job";
+    public static final String DATABASE_JOB_TRIGGER = "database-job-trigger";
+
+    public static final String REPOSITORY_DISCOVERER_GROUP = "repository-group";
+
+    public static final String REPOSITORY_JOB = "repository-job";
+    public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
+
+    public void start()
+        throws StartingException
+    {
+        try
+        {
+            List repositories = archivaConfiguration.getConfiguration().getRepositories();
+
+            for ( Iterator i = repositories.iterator(); i.hasNext(); )
+            {
+                RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
+
+                scheduleRepositoryJobs( repoConfig );
+            }
+
+            scheduleDatabaseJobs();
+        }
+        catch ( SchedulerException e )
+        {
+            throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
+        }
+    }
+
+    private void scheduleRepositoryJobs( RepositoryConfiguration repoConfig )
+        throws SchedulerException
+    {
+        if ( repoConfig.getRefreshCronExpression() == null )
+        {
+            getLogger().warn( "Skipping job, no cron expression for " + repoConfig.getId() );
+            return;
+        }
+
+        // get the cron string for these database scanning jobs
+        String cronString = repoConfig.getRefreshCronExpression();
+
+        // setup the unprocessed artifact job
+        JobDetail repositoryJob =
+            new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_DISCOVERER_GROUP, RepositoryTaskJob.class );
+
+        JobDataMap dataMap = new JobDataMap();
+        dataMap.put( RepositoryTaskJob.TASK_QUEUE, archivaTaskQueue );
+        dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
+        repositoryJob.setJobDataMap( dataMap );
+
+        try
+        {
+            CronTrigger trigger =
+                new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(), REPOSITORY_DISCOVERER_GROUP, cronString );
+
+            scheduler.scheduleJob( repositoryJob, trigger );
+        }
+        catch ( ParseException e )
+        {
+            getLogger().error( "ParseException in repository scanning cron expression, disabling repository scanning for '" + repoConfig.getId() + "': " + e.getMessage() );
+        }
+    }
+
+    private void scheduleDatabaseJobs()
+        throws SchedulerException
+    {
+        String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
+
+        // setup the unprocessed artifact job
+        JobDetail databaseJob =
+            new JobDetail( DATABASE_JOB, DATABASE_DISCOVERER_GROUP, DatabaseTaskJob.class );
+
+        JobDataMap dataMap = new JobDataMap();
+        dataMap.put( DatabaseTaskJob.TASK_QUEUE, archivaTaskQueue );
+        databaseJob.setJobDataMap( dataMap );
+
+        try
+        {
+            CronTrigger trigger =
+                new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_DISCOVERER_GROUP, cronString );
+
+            scheduler.scheduleJob( databaseJob, trigger );
+        }
+        catch ( ParseException e )
+        {
+            getLogger().error( "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
+        }
+    }
+
+    public void stop()
+        throws StoppingException
+    {
+        try
+        {
+            scheduler.unscheduleJob( DATABASE_JOB, DATABASE_DISCOVERER_GROUP );
+        }
+        catch ( SchedulerException e )
+        {
+            throw new StoppingException( "Unable to unschedule tasks", e );
+        }
+    }
+
+    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
+    {
+        // nothing to do
+    }
+
+    /**
+     *
+     */
+    public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
+    {
+        // cronExpression comes from the database scanning section
+        if ( "cronExpression".equals( propertyName ) )
+        {
+            getLogger().debug( "Restarting the database scheduled task after property change: " + propertyName );
+
+            try
+            {
+                scheduler.unscheduleJob( DATABASE_JOB, DATABASE_DISCOVERER_GROUP );
+
+                scheduleDatabaseJobs();
+            }
+            catch ( SchedulerException e )
+            {
+                getLogger().error( "Error restarting the database scanning job after property change." );
+            }
+        }
+
+        // refreshCronExpression comes from the repositories section
+        //
+        // currently we have to reschedule all repo jobs because we don't know where the changed one came from
+        if ( "refreshCronExpression".equals( propertyName ) )
+        {
+            List repositories = archivaConfiguration.getConfiguration().getRepositories();
+
+            for ( Iterator i = repositories.iterator(); i.hasNext(); )
+            {
+                RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
+
+                if ( repoConfig.getRefreshCronExpression() != null )
+                {
+                    try
+                    {
+                        // unschedule handles jobs that might not exist
+                        scheduler.unscheduleJob( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_DISCOVERER_GROUP );
+                        scheduleRepositoryJobs( repoConfig );
+                    }
+                    catch ( SchedulerException e )
+                    {
+                        getLogger().error( "error restarting job: " + REPOSITORY_JOB + ":" + repoConfig.getId() );
+                    }
+                }
+            }
+        }
+    }
+
+    public void runAllRepositoryTasks() throws TaskExecutionException
+    {
+        try
+        {
+            List repositories = archivaConfiguration.getConfiguration().getRepositories();
+
+            for ( Iterator i = repositories.iterator(); i.hasNext(); )
+            {
+                RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
+
+                scheduleRepositoryJobs( repoConfig );
+            }
+        }
+        catch ( SchedulerException e )
+        {
+            throw new TaskExecutionException( "Unable to schedule repository jobs: " + e.getMessage(), e );
+        }
+    }
+
+    public void runDatabaseTasks() throws TaskExecutionException
+    {
+        try
+        {
+            scheduleDatabaseJobs();
+        }
+        catch ( SchedulerException e )
+        {
+            throw new TaskExecutionException( "Unable to schedule repository jobs: " + e.getMessage(), e );
+        }
+    }
+
+    public void runRepositoryTasks( String repositoryId ) throws TaskExecutionException
+    {
+        try
+        {
+            RepositoryConfiguration repoConfig = archivaConfiguration.getConfiguration().findRepositoryById( repositoryId );
+
+            scheduleRepositoryJobs( repoConfig );
+        }
+        catch ( SchedulerException e )
+        {
+            throw new TaskExecutionException( "Unable to schedule repository jobs: " + e.getMessage(), e );
+        }
+    }
+}
@@ -1,210 +0,0 @@
-package org.apache.maven.archiva.scheduled;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.repositories.ActiveManagedRepositories;
-import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
-import org.apache.maven.archiva.scheduler.task.RepositoryTask;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
-import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
-import org.codehaus.plexus.registry.Registry;
-import org.codehaus.plexus.registry.RegistryListener;
-import org.codehaus.plexus.scheduler.Scheduler;
-import org.codehaus.plexus.taskqueue.TaskQueue;
-import org.codehaus.plexus.taskqueue.TaskQueueException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.quartz.CronTrigger;
-import org.quartz.JobDataMap;
-import org.quartz.JobDetail;
-import org.quartz.SchedulerException;
-
-import java.text.ParseException;
-
-/**
- * Default implementation of a scheduling component for the application.
- *
- * @author <a href="mailto:brett@apache.org">Brett Porter</a>
- * @plexus.component role="org.apache.maven.archiva.scheduler.RepositoryTaskScheduler"
- */
-public class DefaultRepositoryTaskScheduler
-    extends AbstractLogEnabled
-    implements RepositoryTaskScheduler, Startable, RegistryListener
-{
-    /**
-     * @plexus.requirement
-     */
-    private Scheduler scheduler;
-
-    /**
-     * @plexus.requirement role-hint="data-refresh"
-     */
-    private TaskQueue datarefreshQueue;
-
-    /**
-     * @plexus.requirement
-     */
-    private ArchivaConfiguration archivaConfiguration;
-
-    /**
-     * @plexus.requirement
-     */
-    private ActiveManagedRepositories activeRepositories;
-
-    private static final String DISCOVERER_GROUP = "DISCOVERER";
-
-    private static final String DATA_REFRESH_JOB = "dataRefreshTask";
-
-    public void start()
-        throws StartingException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-        archivaConfiguration.addChangeListener( this );
-
-        try
-        {
-            scheduleJobs( configuration.getDataRefreshCronExpression() );
-        }
-        catch ( ParseException e )
-        {
-            throw new StartingException( "Invalid configuration: " + configuration.getDataRefreshCronExpression(), e );
-        }
-        catch ( SchedulerException e )
-        {
-            throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
-        }
-    }
-
-    private void scheduleJobs( String indexerCronExpression )
-        throws ParseException, SchedulerException
-    {
-        JobDetail jobDetail = createJobDetail( DATA_REFRESH_JOB );
-
-        getLogger().info( "Scheduling data-refresh: " + indexerCronExpression );
-        CronTrigger trigger = new CronTrigger( DATA_REFRESH_JOB + "Trigger", DISCOVERER_GROUP, indexerCronExpression );
-        scheduler.scheduleJob( jobDetail, trigger );
-
-        try
-        {
-            queueNowIfNeeded();
-        }
-        catch ( org.codehaus.plexus.taskqueue.execution.TaskExecutionException e )
-        {
-            getLogger().error( "Error executing task first time, continuing anyway: " + e.getMessage(), e );
-        }
-    }
-
-    private JobDetail createJobDetail( String jobName )
-    {
-        JobDetail jobDetail = new JobDetail( jobName, DISCOVERER_GROUP, RepositoryTaskJob.class );
-
-        JobDataMap dataMap = new JobDataMap();
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE, datarefreshQueue );
-        dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, RepositoryTask.QUEUE_POLICY_SKIP );
-        jobDetail.setJobDataMap( dataMap );
-
-        return jobDetail;
-    }
-
-    public void stop()
-        throws StoppingException
-    {
-        try
-        {
-            scheduler.unscheduleJob( DATA_REFRESH_JOB, DISCOVERER_GROUP );
-        }
-        catch ( SchedulerException e )
-        {
-            throw new StoppingException( "Unable to unschedule tasks", e );
-        }
-    }
-
-    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
-    {
-        // nothing to do
-    }
-
-    public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
-    {
-        if ( "dataRefreshCronExpression".equals( propertyName ) )
-        {
-            getLogger().debug( "Restarting task scheduler with new configuration after property change: " +
-                propertyName + " to " + propertyValue );
-            try
-            {
-                stop();
-            }
-            catch ( StoppingException e )
-            {
-                getLogger().warn( "Error stopping task scheduler: " + e.getMessage(), e );
-            }
-
-            try
-            {
-                Configuration configuration = archivaConfiguration.getConfiguration();
-                scheduleJobs( configuration.getDataRefreshCronExpression() );
-            }
-            catch ( ParseException e )
-            {
-                getLogger().error(
-                    "Error restarting task scheduler after configuration change, due to configuration error: " +
-                        e.getMessage(), e );
-            }
-            catch ( SchedulerException e )
-            {
-                getLogger().error( "Error restarting task scheduler after configuration change: " + e.getMessage(), e );
-            }
-        }
-        else
-        {
-            getLogger().debug(
-                "Not restarting task scheduler with new configuration after property change: " + propertyName );
-        }
-    }
-
-    public void runDataRefresh()
-        throws TaskExecutionException
-    {
-        DataRefreshTask task = new DataRefreshTask();
-        task.setJobName( "DATA_REFRESH_INIT" );
-        try
-        {
-            datarefreshQueue.put( task );
-        }
-        catch ( TaskQueueException e )
-        {
-            throw new TaskExecutionException( e.getMessage(), e );
-        }
-    }
-
-    public void queueNowIfNeeded()
-        throws TaskExecutionException
-    {
-        if ( activeRepositories.needsDataRefresh() )
-        {
-            runDataRefresh();
-        }
-    }
-}
@@ -19,7 +19,7 @@ package org.apache.maven.archiva.scheduled;
  * under the License.
  */
 
-import org.apache.maven.archiva.scheduled.tasks.DataRefreshTask;
+import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
 import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.codehaus.plexus.scheduler.AbstractJob;
 import org.codehaus.plexus.taskqueue.TaskQueue;
@@ -29,7 +29,7 @@ import org.quartz.JobExecutionContext;
 import org.quartz.JobExecutionException;
 
 /**
- * This class is the discoverer job that is executed by the scheduler.
+ * This class is the repository job that is executed by the scheduler.
  */
 public class RepositoryTaskJob
     extends AbstractJob
@@ -40,6 +40,8 @@ public class RepositoryTaskJob
 
     static final String TASK_QUEUE_POLICY = "TASK_QUEUE_POLICY";
 
+    static final String TASK_REPOSITORY = "TASK_REPOSITORY";
+
     /**
      * Execute the discoverer and the indexer.
      *
@@ -56,8 +58,8 @@ public class RepositoryTaskJob
         TaskQueue taskQueue = (TaskQueue) dataMap.get( TASK_QUEUE );
         String queuePolicy = dataMap.get( TASK_QUEUE_POLICY ).toString();
 
-        RepositoryTask task = new DataRefreshTask();
-        task.setJobName( context.getJobDetail().getName() );
+        ArchivaTask task = new RepositoryTask();
+        task.setName( context.getJobDetail().getName() );
 
         try
         {
@@ -67,11 +69,11 @@ public class RepositoryTaskJob
             }
             else
             {
-                if ( RepositoryTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
+                if ( ArchivaTask.QUEUE_POLICY_WAIT.equals( queuePolicy ) )
                 {
                     taskQueue.put( task );
                 }
-                else if ( RepositoryTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
+                else if ( ArchivaTask.QUEUE_POLICY_SKIP.equals( queuePolicy ) )
                 {
                     // do not queue anymore, policy is to skip
                 }
@@ -1,61 +0,0 @@
-package org.apache.maven.archiva.scheduled.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Mutable list of consumer for the Data Refresh.
- *
- * NOTE: This class only exists to minimize the requirements of manual component management.
- * This approach allows for a small and simple component definition in the application.xml
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- *
- * @plexus.component role="org.apache.maven.archiva.scheduler.executors.DataRefreshConsumers"
- */
-public class DataRefreshConsumers
-{
-    /**
-     * @plexus.configuration
-     */
-    private List consumerNames;
-
-    public List getConsumerNames()
-    {
-        if ( consumerNames == null )
-        {
-            consumerNames = new ArrayList();
-            consumerNames.add( "index-artifact" );
-            consumerNames.add( "artifact-health" );
-            consumerNames.add( "metadata-health" );
-        }
-
-        return consumerNames;
-    }
-
-    public Iterator iterator()
-    {
-        return getConsumerNames().iterator();
-    }
-}
@@ -1,191 +0,0 @@
-package org.apache.maven.archiva.scheduled.executors;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.maven.archiva.configuration.ArchivaConfiguration;
-import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.configuration.ConfiguredRepositoryFactory;
-import org.apache.maven.archiva.configuration.RepositoryConfiguration;
-import org.apache.maven.archiva.discoverer.Discoverer;
-import org.apache.maven.archiva.discoverer.DiscovererException;
-import org.apache.maven.archiva.discoverer.DiscovererStatistics;
-import org.apache.maven.archiva.repository.consumer.Consumer;
-import org.apache.maven.archiva.repository.consumer.ConsumerException;
-import org.apache.maven.archiva.repository.consumer.ConsumerFactory;
-import org.apache.maven.archiva.scheduler.task.DataRefreshTask;
-import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.codehaus.plexus.logging.AbstractLogEnabled;
-import org.codehaus.plexus.taskqueue.Task;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
-import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * DataRefreshExecutor
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- *
- * @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
- * role-hint="data-refresh"
- */
-public class DataRefreshExecutor
-    extends AbstractLogEnabled
-    implements TaskExecutor
-{
-    public static final String DATAREFRESH_FILE = ".datarefresh";
-
-    /**
-     * Configuration store.
-     *
-     * @plexus.requirement
-     */
-    private ArchivaConfiguration archivaConfiguration;
-
-    /**
-     * @plexus.requirement
-     */
-    private ConfiguredRepositoryFactory repoFactory;
-
-    /**
-     * @plexus.requirement
-     */
-    private DataRefreshConsumers consumerNames;
-
-    public void executeTask( Task task )
-        throws TaskExecutionException
-    {
-        DataRefreshTask indexerTask = (DataRefreshTask) task;
-
-        getLogger().info( "Executing task from queue with job name: " + indexerTask.getJobName() );
-
-        execute();
-    }
-
-    public void execute()
-        throws TaskExecutionException
-    {
-        Configuration configuration = archivaConfiguration.getConfiguration();
-
-        List consumers = new ArrayList();
-
-        for ( Iterator it = consumerNames.iterator(); it.hasNext(); )
-        {
-            String name = (String) it.next();
-            try
-            {
-                Consumer consumer = consumerFactory.createConsumer( name );
-                consumers.add( consumer );
-            }
-            catch ( ConsumerException e )
-            {
-                getLogger().warn( e.getMessage(), e );
-                throw new TaskExecutionException( e.getMessage(), e );
-            }
-        }
-
-        long time = System.currentTimeMillis();
-
-        for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
-        {
-            RepositoryConfiguration repositoryConfiguration = (RepositoryConfiguration) i.next();
-
-            if ( !repositoryConfiguration.isIndexed() )
-            {
-                continue;
-            }
-
-            ArtifactRepository repository = repoFactory.createRepository( repositoryConfiguration );
-
-            List filteredConsumers = filterConsumers( consumers, repository );
-
-            DiscovererStatistics lastRunStats = new DiscovererStatistics( repository );
-            try
-            {
-                lastRunStats.load( DATAREFRESH_FILE );
-            }
-            catch ( IOException e )
-            {
-                getLogger().info(
-                    "Unable to load last run statistics for repository [" + repository.getId() + "]: "
-                        + e.getMessage() );
-            }
-
-            try
-            {
-                DiscovererStatistics stats = discoverer
-                    .walkRepository( repository, filteredConsumers, repositoryConfiguration.isIncludeSnapshots(),
-                                     lastRunStats.getTimestampFinished(), null, null );
-
-                stats.dump( getLogger() );
-                stats.save( DATAREFRESH_FILE );
-            }
-            catch ( DiscovererException e )
-            {
-                getLogger().error(
-                    "Unable to run data refresh against repository [" + repository.getId() + "]: "
-                        + e.getMessage(), e );
-            }
-            catch ( IOException e )
-            {
-                getLogger().warn(
-                    "Unable to save last run statistics for repository [" + repository.getId() + "]: "
-                        + e.getMessage() );
-            }
-        }
-
-        time = System.currentTimeMillis() - time;
-
-        getLogger().info( "Finished data refresh process in " + time + "ms." );
-    }
-
-    /**
-     * Not all consumers work with all repositories.
-     * This will filter out those incompatible consumers based on the provided repository.
-     *
-     * @param consumers the initial list of consumers.
-     * @param repository the repository to test consumer against.
-     * @return the filtered list of consumers.
-     */
-    private List filterConsumers( List consumers, ArtifactRepository repository )
-    {
-        List filtered = new ArrayList();
-
-        for ( Iterator it = consumers.iterator(); it.hasNext(); )
-        {
-            Consumer consumer = (Consumer) it.next();
-            if ( consumer.init( repository ) )
-            {
-                // Approved!
-                filtered.add( consumer );
-            }
-            else
-            {
-                getLogger().info( "Disabling consumer [" + consumer.getName() + "] for repository " + repository );
-            }
-        }
-
-        return filtered;
-    }
-}
@@ -26,32 +26,43 @@ import org.codehaus.plexus.taskqueue.Task;
  *
  * @author <a href="mailto:brett@apache.org">Brett Porter</a>
  */
-public interface RepositoryTask
+public interface ArchivaTask
     extends Task
 {
-    String QUEUE_POLICY_WAIT = "wait";
+    public static final String QUEUE_POLICY_WAIT = "wait";
 
-    String QUEUE_POLICY_SKIP = "skip";
+    public static final String QUEUE_POLICY_SKIP = "skip";
 
     /**
      * Gets the queue policy for this task.
      *
     * @return Queue policy for this task
      */
-    String getQueuePolicy();
+    public String getQueuePolicy();
 
     /**
      * Sets the queue policy for this task.
      *
     * @param policy
      */
-    void setQueuePolicy( String policy );
+    public void setQueuePolicy( String policy );
 
     /**
     * Sets the job name to represent a group of similar / identical job tasks.  Can be used to check the
     * task queue for similar / identical job tasks.
      */
-    void setJobName( String jobName );
+    public void setName( String name );
 
+    /**
+     * obtains the name of the task
+     * @return
+     */
+    public String getName();
+
+    public long getMaxExecutionTime();
+
+    public void setMaxExecutionTime( long maxExecutionTime );
+
-    String getJobName();
 
 }
@@ -1,41 +0,0 @@
-package org.apache.maven.archiva.scheduled.tasks;
-
-/**
- * DataRefreshTask - task for discovering changes in the repository
- * and updating all associated data.
- *
- * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
- * @version $Id$
- */
-public class DataRefreshTask
-    implements RepositoryTask
-{
-    private String jobName;
-
-    private String policy;
-
-    public String getJobName()
-    {
-        return jobName;
-    }
-
-    public String getQueuePolicy()
-    {
-        return policy;
-    }
-
-    public void setJobName( String jobName )
-    {
-        this.jobName = jobName;
-    }
-
-    public void setQueuePolicy( String policy )
-    {
-        this.policy = policy;
-    }
-
-    public long getMaxExecutionTime()
-    {
-        return 0;
-    }
-}
@@ -0,0 +1,49 @@
+package org.apache.maven.archiva.scheduled.tasks;
+
+/**
+ * DataRefreshTask - task for discovering changes in the repository
+ * and updating all associated data.
+ *
+ * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
+ * @version $Id: DataRefreshTask.java 525176 2007-04-03 15:21:33Z joakime $
+ */
+public class DatabaseTask
+    implements ArchivaTask
+{
+    String name;
+
+    String queuePolicy;
+
+    long maxExecutionTime;
+
+    public long getMaxExecutionTime()
+    {
+        return maxExecutionTime;
+    }
+
+    public void setMaxExecutionTime( long maxExecutionTime )
+    {
+        this.maxExecutionTime = maxExecutionTime;
+    }
+
+    public String getName()
+    {
+        return name;
+    }
+
+    public void setName( String name )
+    {
+        this.name = name;
+    }
+
+    public String getQueuePolicy()
+    {
+        return queuePolicy;
+    }
+
+    public void setQueuePolicy( String queuePolicy )
+    {
+        this.queuePolicy = queuePolicy;
+    }
+}
@@ -0,0 +1,16 @@
+<components>
+  <component>
+    <role>org.codehaus.plexus.taskqueue.TaskQueue</role>
+    <role-hint>archiva-task-queue</role-hint>
+    <implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
+    <lifecycle-handler>plexus-configurable</lifecycle-handler>
+    <configuration>
+      <task-entry-evaluators>
+      </task-entry-evaluators>
+      <task-exit-evaluators>
+      </task-exit-evaluators>
+      <task-viability-evaluators>
+      </task-viability-evaluators>
+    </configuration>
+  </component>
+</components>
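The descriptor above registers a single DefaultTaskQueue under the role-hint archiva-task-queue, and the merge-descriptors execution added to the POM earlier folds this hand-written file into the generated Plexus descriptor. Any component can then share the queue through a requirement with the same role-hint, exactly as DefaultArchivaTaskScheduler does above; the sketch below is illustrative only (the class and method names are made up, the role-hint and the snapshot call come from this commit).

import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;

public class QueueConsumerSketch
{
    /**
     * @plexus.requirement role-hint="archiva-task-queue"
     */
    private TaskQueue archivaTaskQueue;

    public int pendingTasks()
        throws TaskQueueException
    {
        // Same snapshot call the jobs use before deciding whether to enqueue another task.
        return archivaTaskQueue.getQueueSnapshot().size();
    }
}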
@@ -22,7 +22,7 @@ package org.apache.maven.archiva.scheduled.executors;
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.scheduled.tasks.DataRefreshTask;
+import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
 import org.codehaus.plexus.PlexusTestCase;
 import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
 import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
@@ -45,17 +45,13 @@ public class DataRefreshExecutorTest
     {
         super.setUp();
 
-        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "data-refresh" );
+        taskExecutor = (TaskExecutor) lookup( "org.codehaus.plexus.taskqueue.execution.TaskExecutor", "archiva-task-executor" );
 
         ArchivaConfiguration archivaConfiguration =
             (ArchivaConfiguration) lookup( ArchivaConfiguration.class.getName() );
         Configuration configuration = archivaConfiguration.getConfiguration();
-
-        File indexPath = new File( configuration.getIndexPath() );
-        if ( indexPath.exists() )
-        {
-            FileUtils.deleteDirectory( indexPath );
-        }
     }
 
     public void testExecutor()
@@ -65,7 +61,7 @@ public class DataRefreshExecutorTest
     }
 
     class TestDataRefreshTask
-        extends DataRefreshTask
+        extends RepositoryTask
     {
        public String getJobName()
        {
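The test above now looks the executor up under the role-hint archiva-task-executor instead of data-refresh, but no executor with that hint is added in this diff. Purely as an illustration of where the single-queue design points, a combined executor could dispatch on the concrete task type roughly as follows; the class name and body are assumptions, only the TaskExecutor contract and the task classes come from the commit.

import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;

public class ArchivaTaskExecutorSketch
    implements TaskExecutor
{
    public void executeTask( Task task )
        throws TaskExecutionException
    {
        // The single queue carries both task flavours, so dispatch on the concrete type.
        ArchivaTask archivaTask = (ArchivaTask) task;

        if ( archivaTask instanceof DatabaseTask )
        {
            // run the database scanning work here
        }
        else if ( archivaTask instanceof RepositoryTask )
        {
            // run the repository scan for archivaTask.getName() here
        }
    }
}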