[MRM-336]: Repository Admin - "Index Repository" appears to not do anything.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@537304 13f79535-47bb-0310-9956-ffa450edef68
Joakim Erdfelt 2007-05-11 22:25:12 +00:00
parent cfb785460a
commit b2a4a02c00
36 changed files with 999 additions and 438 deletions


@@ -2098,27 +2098,110 @@
<codeSegments>
<codeSegment>
<version>1.0.0+</version>
<code>
private transient long startTimestamp;
<code><![CDATA[
private transient long startTimestamp;
public void triggerStart()
{
startTimestamp = System.currentTimeMillis();
}
public void triggerStart() {
startTimestamp = System.currentTimeMillis();
}
public void triggerFinished()
{
long finished = System.currentTimeMillis();
setDuration( finished - startTimestamp );
setWhenGathered( new java.util.Date( finished ) );
}
public void triggerFinished() {
long finished = System.currentTimeMillis();
setDuration( finished - startTimestamp );
setWhenGathered( new java.util.Date( finished ) );
}
public void increaseFileCount()
{
this.totalFileCount++;
}
public void increaseFileCount() {
this.totalFileCount++;
}
public void increaseNewFileCount()
{
this.newFileCount++;
}
public void increaseNewFileCount() {
this.newFileCount++;
}
private transient java.util.List knownConsumers;
private transient java.util.List invalidConsumers;
public void setKnownConsumers( java.util.List consumers )
{
knownConsumers = consumers;
}
public void setInvalidConsumers( java.util.List consumers )
{
invalidConsumers = consumers;
}
private static boolean isEmpty( java.util.Collection coll )
{
if ( coll == null )
{
return true;
}
return coll.isEmpty();
}
public String toDump( ArchivaRepository repo )
{
java.text.SimpleDateFormat df = new java.text.SimpleDateFormat();
StringBuffer buf = new StringBuffer();
buf.append( "\n.\\ Scan of " ).append( this.getRepositoryId() );
buf.append( " \\.__________________________________________" );
buf.append( "\n Repository URL : " ).append( repo.getUrl() );
buf.append( "\n Repository Name : " ).append( repo.getModel().getName() );
buf.append( "\n Repository Layout : " ).append( repo.getModel().getLayoutName() );
buf.append( "\n Known Consumers : " );
if ( !isEmpty( knownConsumers ) )
{
buf.append( "(" ).append( knownConsumers.size() ).append( " configured)" );
for ( java.util.Iterator iter = knownConsumers.iterator(); iter.hasNext(); )
{
String id = (String) iter.next();
buf.append( "\n " ).append( id );
}
}
else
{
buf.append( "<none>" );
}
buf.append( "\n Invalid Consumers : " );
if ( !isEmpty( invalidConsumers ) )
{
buf.append( "(" ).append( invalidConsumers.size() ).append( " configured)" );
for ( java.util.Iterator iter = invalidConsumers.iterator(); iter.hasNext(); )
{
String id = (String) iter.next();
buf.append( "\n " ).append( id );
}
}
else
{
buf.append( "<none>" );
}
buf.append( "\n Duration : " );
buf.append( org.apache.maven.archiva.common.utils.DateUtil.getDuration( this.getDuration() ) );
buf.append( "\n When Gathered : " ).append( df.format( this.getWhenGathered() ) );
buf.append( "\n Total File Count : " ).append( this.getTotalFileCount() );
long averageMsPerFile = ( this.getDuration() / this.getTotalFileCount() );
buf.append( "\n Avg Time Per File : " );
buf.append( org.apache.maven.archiva.common.utils.DateUtil.getDuration( averageMsPerFile ) );
buf.append( "\n______________________________________________________________" );
return buf.toString();
}
]]></code>
</codeSegment>
</codeSegments>
</class>
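
A minimal usage sketch of the lifecycle added above, assuming the Modello-generated RepositoryContentStatistics class (setRepositoryId is inferred from the getRepositoryId() call in toDump). Note that toDump() divides by getTotalFileCount(), so it presumes at least one file was counted:

import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.RepositoryContentStatistics;

import java.util.Iterator;
import java.util.List;

public class StatisticsLifecycleSketch
{
    public static RepositoryContentStatistics recordScan( ArchivaRepository repo, List files )
    {
        RepositoryContentStatistics stats = new RepositoryContentStatistics();
        stats.setRepositoryId( repo.getId() ); // setter assumed from getRepositoryId() above
        stats.triggerStart();                  // records startTimestamp
        for ( Iterator it = files.iterator(); it.hasNext(); )
        {
            it.next();
            stats.increaseFileCount();         // one per file walked
        }
        stats.triggerFinished();               // fills duration and whenGathered
        return stats;
    }
}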


@@ -19,8 +19,10 @@ package org.apache.maven.archiva.repository.scanner;
* under the License.
*/
import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
@@ -123,6 +125,38 @@ public class DefaultRepositoryScanner
// Execute scan.
dirWalker.scan();
return scannerInstance.getStatistics();
RepositoryContentStatistics stats = scannerInstance.getStatistics();
ConsumerIdClosure consumerIdList;
consumerIdList = new ConsumerIdClosure();
CollectionUtils.forAllDo( knownContentConsumers, consumerIdList );
stats.setKnownConsumers( consumerIdList.getList() );
consumerIdList = new ConsumerIdClosure();
CollectionUtils.forAllDo( invalidContentConsumers, consumerIdList );
stats.setInvalidConsumers( consumerIdList.getList() );
return stats;
}
class ConsumerIdClosure
implements Closure
{
private List list = new ArrayList();
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) input;
list.add( consumer.getId() );
}
}
public List getList()
{
return list;
}
}
}
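
ConsumerIdClosure is a plain commons-collections Closure: forAllDo() feeds it every element of the consumer list so it can collect the ids that end up in the statistics. A self-contained sketch of the same pattern with an inline Closure and made-up consumer ids:

import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ClosureSketch
{
    public static void main( String[] args )
    {
        final List ids = new ArrayList();
        // forAllDo applies the closure to each element, exactly how
        // ConsumerIdClosure gathers consumer ids for the statistics.
        CollectionUtils.forAllDo( Arrays.asList( new String[] { "index-content", "auto-remove" } ),
                                  new Closure()
                                  {
                                      public void execute( Object input )
                                      {
                                          ids.add( (String) input );
                                      }
                                  } );
        System.out.println( ids ); // prints [index-content, auto-remove]
    }
}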


@@ -24,11 +24,13 @@ import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.functors.IfClosure;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryScanningConfiguration;
import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import java.util.ArrayList;
import java.util.HashMap;
@@ -44,6 +46,8 @@ import java.util.Map;
* @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers"
*/
public class RepositoryContentConsumers
extends AbstractLogEnabled
implements Initializable
{
/**
* @plexus.requirement
@@ -60,6 +64,10 @@ public class RepositoryContentConsumers
*/
private List availableInvalidConsumers;
private SelectedKnownRepoConsumersPredicate selectedKnownPredicate;
private SelectedInvalidRepoConsumersPredicate selectedInvalidPredicate;
class SelectedKnownRepoConsumersPredicate
implements Predicate
{
@@ -70,14 +78,14 @@ public class RepositoryContentConsumers
if ( object instanceof KnownRepositoryContentConsumer )
{
KnownRepositoryContentConsumer known = (KnownRepositoryContentConsumer) object;
Configuration config = archivaConfiguration.getConfiguration();
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration()
.getRepositoryScanning();
return config.getRepositoryScanning().getKnownContentConsumers().contains( known.getId() );
return scanning.getKnownContentConsumers().contains( known.getId() );
}
return satisfies;
}
}
class SelectedInvalidRepoConsumersPredicate
@@ -90,9 +98,10 @@ public class RepositoryContentConsumers
if ( object instanceof InvalidRepositoryContentConsumer )
{
InvalidRepositoryContentConsumer invalid = (InvalidRepositoryContentConsumer) object;
Configuration config = archivaConfiguration.getConfiguration();
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration()
.getRepositoryScanning();
return config.getRepositoryScanning().getInvalidContentConsumers().contains( invalid.getId() );
return scanning.getInvalidContentConsumers().contains( invalid.getId() );
}
return satisfies;
@@ -119,22 +128,19 @@ public class RepositoryContentConsumers
}
}
public Predicate getKnownSelectionPredicate()
public void initialize()
throws InitializationException
{
return new SelectedKnownRepoConsumersPredicate();
this.selectedKnownPredicate = new SelectedKnownRepoConsumersPredicate();
this.selectedInvalidPredicate = new SelectedInvalidRepoConsumersPredicate();
}
public Predicate getInvalidSelectionPredicate()
{
return new SelectedInvalidRepoConsumersPredicate();
}
public List getSelectedKnownConsumerIds()
{
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration().getRepositoryScanning();
return scanning.getKnownContentConsumers();
}
public List getSelectedInvalidConsumerIds()
{
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration().getRepositoryScanning();
@@ -144,7 +150,7 @@ public class RepositoryContentConsumers
public Map getSelectedKnownConsumersMap()
{
RepoConsumerToMapClosure consumerMapClosure = new RepoConsumerToMapClosure();
Closure ifclosure = IfClosure.getInstance( getKnownSelectionPredicate(), consumerMapClosure );
Closure ifclosure = IfClosure.getInstance( selectedKnownPredicate, consumerMapClosure );
CollectionUtils.forAllDo( availableKnownConsumers, ifclosure );
return consumerMapClosure.getMap();
@@ -153,28 +159,24 @@ public class RepositoryContentConsumers
public Map getSelectedInvalidConsumersMap()
{
RepoConsumerToMapClosure consumerMapClosure = new RepoConsumerToMapClosure();
Closure ifclosure = IfClosure.getInstance( getInvalidSelectionPredicate(), consumerMapClosure );
Closure ifclosure = IfClosure.getInstance( selectedInvalidPredicate, consumerMapClosure );
CollectionUtils.forAllDo( availableInvalidConsumers, ifclosure );
return consumerMapClosure.getMap();
}
public List getSelectedKnownConsumers()
{
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration().getRepositoryScanning();
List ret = new ArrayList();
ret.addAll( CollectionUtils.select( scanning.getKnownContentConsumers(), getKnownSelectionPredicate() ));
ret.addAll( CollectionUtils.select( availableKnownConsumers, selectedKnownPredicate ) );
return ret;
}
public List getSelectedInvalidConsumers()
{
RepositoryScanningConfiguration scanning = archivaConfiguration.getConfiguration().getRepositoryScanning();
List ret = new ArrayList();
ret.addAll( CollectionUtils.select( scanning.getInvalidContentConsumers(), getInvalidSelectionPredicate() ));
ret.addAll( CollectionUtils.select( availableInvalidConsumers, selectedInvalidPredicate ) );
return ret;
}
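
This hunk looks like the heart of MRM-336: the old getSelectedKnownConsumers()/getSelectedInvalidConsumers() ran the selection predicate over the configured id strings, and since a String is never an instanceof a consumer, both lists always came back empty. Selecting over the available consumer components instead fixes that. A minimal sketch of the select-plus-predicate idiom, with hypothetical ids standing in for consumer components:

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

public class PredicateSelectSketch
{
    public static void main( String[] args )
    {
        final List enabled = Arrays.asList( new String[] { "index-content", "validate-checksum" } );
        // select() keeps only the elements the predicate accepts, as the
        // fixed getSelectedKnownConsumers() now does over availableKnownConsumers
        Collection selected = CollectionUtils.select(
            Arrays.asList( new String[] { "index-content", "auto-rename" } ),
            new Predicate()
            {
                public boolean evaluate( Object object )
                {
                    return enabled.contains( object );
                }
            } );
        System.out.println( selected ); // prints [index-content]
    }
}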


@@ -23,7 +23,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.DateUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
@@ -40,7 +39,6 @@ import org.codehaus.plexus.tools.cli.AbstractCli;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -187,24 +185,7 @@ public class ArchivaCli
RepositoryContentStatistics stats = scanner.scan( repo, knownConsumerList, invalidConsumerList,
ignoredContent, RepositoryScanner.FRESH_SCAN );
SimpleDateFormat df = new SimpleDateFormat();
System.out.println( "" );
System.out.println( ".\\ Scan of " + repo.getId() + " \\.__________________________________________" );
System.out.println( " Repository URL : " + repo.getUrl() );
System.out.println( " Repository Name : " + repo.getModel().getName() );
System.out.println( " Repository Layout : " + repo.getModel().getLayoutName() );
System.out.println( " Consumers : (" + knownConsumerList.size() + " active)" );
for ( Iterator iter = knownConsumerList.iterator(); iter.hasNext(); )
{
RepositoryContentConsumer consumer = (RepositoryContentConsumer) iter.next();
System.out.println( " " + consumer.getId() + " - " + consumer.getDescription() );
}
System.out.println( " Duration : " + DateUtil.getDuration( stats.getDuration() ) );
System.out.println( " When Gathered : " + df.format( stats.getWhenGathered() ) );
System.out.println( " Total File Count : " + stats.getTotalFileCount() );
long averageMsPerFile = ( stats.getDuration() / stats.getTotalFileCount() );
System.out.println( " Avg Time Per File : " + DateUtil.getDuration( averageMsPerFile ) );
System.out.println( "______________________________________________________________" );
System.out.println( "\n" + stats.toDump( repo ) );
}
catch ( RepositoryException e )
{


@@ -41,14 +41,6 @@
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-core-consumers</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-database-consumers</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-taskqueue</artifactId>
@@ -77,6 +69,22 @@
<version>1.0-alpha-2</version>
</dependency>
<!-- Test Dependencies -->
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<version>1.8.0.7</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-core-consumers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-database-consumers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId>
@@ -98,23 +106,23 @@
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.2</version>
<configuration>
<excludes>
<exclude>**/Abstract*</exclude>
<exclude>**/*TestCase.java</exclude>
<exclude>**/*Tests.java</exclude>
<exclude>**/*TestSuite.java</exclude>
<exclude>**/ArchivaScheduledTaskExecutorTest*</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-maven-plugin</artifactId>
<executions>
<execution>
<id>merge</id>
<goals>
<goal>merge-descriptors</goal>
</goals>
<configuration>
<descriptors>
<descriptor>${basedir}/src/main/resources/META-INF/plexus/components.xml</descriptor>
<descriptor>${project.build.directory}/generated-resources/plexus/META-INF/plexus/components.xml</descriptor>
</descriptors>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>


@@ -1,120 +0,0 @@
package org.apache.maven.archiva.scheduled;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.DefaultTaskQueue;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import java.util.Iterator;
import java.util.List;
/**
* ArchivaTaskQueue
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
* @plexus.component role="org.codehaus.plexus.taskqueue.TaskQueue"
* role-hint="archiva-task-queue"
* lifecycle-handler="plexus-configurable"
*/
public class ArchivaTaskQueue
extends DefaultTaskQueue
{
public ArchivaTaskQueue()
{
super();
/* do nothing special */
}
public boolean hasDatabaseTaskInQueue()
{
try
{
List queue = getQueueSnapshot();
Iterator it = queue.iterator();
while ( it.hasNext() )
{
Task task = (Task) it.next();
if ( task instanceof DatabaseTask )
{
return true;
}
}
return false;
}
catch ( TaskQueueException e )
{
return false;
}
}
public boolean hasFilesystemTaskInQueue()
{
try
{
List queue = getQueueSnapshot();
Iterator it = queue.iterator();
while ( it.hasNext() )
{
Task task = (Task) it.next();
if ( task instanceof RepositoryTask )
{
return true;
}
}
return false;
}
catch ( TaskQueueException e )
{
return false;
}
}
public boolean hasRepositoryTaskInQueue( String repoid )
{
try
{
List queue = getQueueSnapshot();
Iterator it = queue.iterator();
while ( it.hasNext() )
{
Task task = (Task) it.next();
if ( task instanceof RepositoryTask )
{
RepositoryTask rtask = (RepositoryTask) task;
if ( StringUtils.equals( repoid, rtask.getRepositoryId() ) )
{
return true;
}
}
}
return false;
}
catch ( TaskQueueException e )
{
return false;
}
}
}


@@ -19,6 +19,10 @@ package org.apache.maven.archiva.scheduled;
* under the License.
*/
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
/**
@@ -32,15 +36,28 @@ public interface ArchivaTaskScheduler
* The Plexus component role.
*/
public final static String ROLE = ArchivaTaskScheduler.class.getName();
public ArchivaTaskQueue getTaskQueue();
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException;
public boolean isProcessingDatabaseTask()
throws ArchivaException;
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException;
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException;
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException;
public void scheduleAllRepositoryTasks()
throws TaskExecutionException;
public void scheduleDatabaseTasks()
throws TaskExecutionException;
public void scheduleRepositoryTask( String repositoryId )
throws TaskExecutionException;
}
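
With getTaskQueue() gone, callers interact with the scheduler only through the queue and inspection methods above. A hypothetical caller, mirroring the check-then-queue flow IndexRepositoryAction adopts later in this commit; the task name string is an assumption:

import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.TaskQueueException;

public class SchedulerClientSketch
{
    public static void queueScanOnce( ArchivaTaskScheduler scheduler, String repoid )
        throws ArchivaException, TaskQueueException
    {
        if ( scheduler.isProcessingRepositoryTask( repoid ) )
        {
            return; // already queued or running; avoid a duplicate task
        }
        RepositoryTask task = new RepositoryTask();
        task.setRepositoryId( repoid );
        task.setName( "repository-job:" + repoid ); // name string is an assumption
        scheduler.queueRepositoryTask( task );
    }
}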


@@ -19,9 +19,14 @@ package org.apache.maven.archiva.scheduled;
* under the License.
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
@@ -30,6 +35,7 @@ import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.scheduler.Scheduler;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.quartz.CronTrigger;
import org.quartz.JobDataMap;
@@ -56,43 +62,48 @@ public class DefaultArchivaTaskScheduler
*/
private Scheduler scheduler;
/**
* @plexus.requirement role-hint="archiva-task-queue"
* @plexus.requirement role-hint="database-update"
*/
private TaskQueue archivaTaskQueue;
private TaskQueue databaseUpdateQueue;
/**
* @plexus.requirement role-hint="repository-scanning"
*/
private TaskQueue repositoryScanningQueue;
/**
* @plexus.requirement
*/
private ArchivaConfiguration archivaConfiguration;
public static final String DATABASE_SCAN_GROUP = "database-group";
public static final String DATABASE_JOB = "database-job";
public static final String DATABASE_JOB_TRIGGER = "database-job-trigger";
public static final String REPOSITORY_SCAN_GROUP = "repository-group";
public static final String REPOSITORY_JOB = "repository-job";
public static final String REPOSITORY_JOB_TRIGGER = "repository-job-trigger";
public void start()
throws StartingException
{
try
{
List repositories = archivaConfiguration.getConfiguration().getRepositories();
for ( Iterator i = repositories.iterator(); i.hasNext(); )
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration)i.next();
scheduleRepositoryJobs( repoConfig );
}
scheduleDatabaseJobs( );
List repositories = archivaConfiguration.getConfiguration().getRepositories();
for ( Iterator i = repositories.iterator(); i.hasNext(); )
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
scheduleRepositoryJobs( repoConfig );
}
scheduleDatabaseJobs();
}
catch ( SchedulerException e )
{
@@ -101,66 +112,68 @@ public class DefaultArchivaTaskScheduler
}
private void scheduleRepositoryJobs( RepositoryConfiguration repoConfig )
throws SchedulerException
throws SchedulerException
{
if ( repoConfig.getRefreshCronExpression() == null )
{
getLogger().warn( "Skipping job, no cron expression for " + repoConfig.getId() );
return;
}
// get the cron string for these database scanning jobs
String cronString = repoConfig.getRefreshCronExpression();
String cronString = repoConfig.getRefreshCronExpression();
// setup the unprocessed artifact job
JobDetail repositoryJob =
new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId() , REPOSITORY_SCAN_GROUP, RepositoryTaskJob.class );
JobDetail repositoryJob = new JobDetail( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP,
RepositoryTaskJob.class );
JobDataMap dataMap = new JobDataMap();
dataMap.put( RepositoryTaskJob.TASK_QUEUE, archivaTaskQueue );
dataMap.put( RepositoryTaskJob.TASK_QUEUE, repositoryScanningQueue );
dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT );
dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() );
repositoryJob.setJobDataMap( dataMap );
try
try
{
CronTrigger trigger =
new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId() , REPOSITORY_SCAN_GROUP, cronString );
CronTrigger trigger = new CronTrigger( REPOSITORY_JOB_TRIGGER + ":" + repoConfig.getId(),
REPOSITORY_SCAN_GROUP, cronString );
scheduler.scheduleJob( repositoryJob, trigger );
}
catch ( ParseException e )
{
getLogger().error( "ParseException in repository scanning cron expression, disabling repository scanning for '" + repoConfig.getId() + "': " + e.getMessage() );
getLogger().error(
"ParseException in repository scanning cron expression, disabling repository scanning for '"
+ repoConfig.getId() + "': " + e.getMessage() );
}
}
private void scheduleDatabaseJobs( )
private void scheduleDatabaseJobs()
throws SchedulerException
{
{
String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression();
// setup the unprocessed artifact job
JobDetail databaseJob =
new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class );
JobDataMap dataMap = new JobDataMap();
dataMap.put( DatabaseTaskJob.TASK_QUEUE, archivaTaskQueue );
dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue );
databaseJob.setJobDataMap( dataMap );
try
try
{
CronTrigger trigger =
new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString );
scheduler.scheduleJob( databaseJob, trigger );
}
catch ( ParseException e )
{
getLogger().error( "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
getLogger().error(
"ParseException in database scanning cron expression, disabling database scanning: "
+ e.getMessage() );
}
}
public void stop()
@@ -168,7 +181,7 @@ public class DefaultArchivaTaskScheduler
{
try
{
scheduler.unscheduleJob( DATABASE_JOB, DATABASE_SCAN_GROUP );
scheduler.unscheduleJob( DATABASE_JOB, DATABASE_SCAN_GROUP );
}
catch ( SchedulerException e )
{
@@ -176,7 +189,6 @@ public class DefaultArchivaTaskScheduler
}
}
public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
// nothing to do
@@ -191,11 +203,11 @@ public class DefaultArchivaTaskScheduler
if ( "cronExpression".equals( propertyName ) )
{
getLogger().debug( "Restarting the database scheduled task after property change: " + propertyName );
try
{
scheduler.unscheduleJob( DATABASE_JOB, DATABASE_SCAN_GROUP );
scheduleDatabaseJobs();
}
catch ( SchedulerException e )
@@ -203,24 +215,24 @@ public class DefaultArchivaTaskScheduler
getLogger().error( "Error restarting the database scanning job after property change." );
}
}
// refreshCronExpression comes from the repositories section
//
// currently we have to reschedule all repo jobs because we don't know where the changed one came from
if ( "refreshCronExpression".equals( propertyName ) )
{
List repositories = archivaConfiguration.getConfiguration().getRepositories();
for ( Iterator i = repositories.iterator(); i.hasNext(); )
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration)i.next();
RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
if ( repoConfig.getRefreshCronExpression() != null )
{
try
{
// unschedule handles jobs that might not exist
scheduler.unscheduleJob( REPOSITORY_JOB + ":" + repoConfig.getId() , REPOSITORY_SCAN_GROUP );
scheduler.unscheduleJob( REPOSITORY_JOB + ":" + repoConfig.getId(), REPOSITORY_SCAN_GROUP );
scheduleRepositoryJobs( repoConfig );
}
catch ( SchedulerException e )
@@ -232,19 +244,20 @@ public class DefaultArchivaTaskScheduler
}
}
public void scheduleAllRepositoryTasks() throws TaskExecutionException
public void scheduleAllRepositoryTasks()
throws TaskExecutionException
{
try
{
List repositories = archivaConfiguration.getConfiguration().getRepositories();
for ( Iterator i = repositories.iterator(); i.hasNext(); )
{
RepositoryConfiguration repoConfig = (RepositoryConfiguration)i.next();
scheduleRepositoryJobs( repoConfig );
RepositoryConfiguration repoConfig = (RepositoryConfiguration) i.next();
scheduleRepositoryJobs( repoConfig );
}
}
catch ( SchedulerException e )
{
@@ -252,7 +265,8 @@ public class DefaultArchivaTaskScheduler
}
}
public void scheduleDatabaseTasks() throws TaskExecutionException
public void scheduleDatabaseTasks()
throws TaskExecutionException
{
try
{
@@ -265,22 +279,82 @@ public class DefaultArchivaTaskScheduler
}
}
public void scheduleRepositoryTask( String repositoryId ) throws TaskExecutionException
public void scheduleRepositoryTask( String repositoryId )
throws TaskExecutionException
{
try
{
RepositoryConfiguration repoConfig = archivaConfiguration.getConfiguration().findRepositoryById( repositoryId );
scheduleRepositoryJobs( repoConfig );
RepositoryConfiguration repoConfig = archivaConfiguration.getConfiguration()
.findRepositoryById( repositoryId );
scheduleRepositoryJobs( repoConfig );
}
catch ( SchedulerException e )
{
throw new TaskExecutionException( "Unable to schedule repository jobs: " + e.getMessage(), e );
}
}
}
public boolean isProcessingAnyRepositoryTask()
throws ArchivaException
{
List queue = null;
try
{
queue = repositoryScanningQueue.getQueueSnapshot();
}
catch ( TaskQueueException e )
{
throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
}
return !queue.isEmpty();
}
public boolean isProcessingRepositoryTask( String repositoryId )
throws ArchivaException
{
List queue = null;
try
{
queue = repositoryScanningQueue.getQueueSnapshot();
}
catch ( TaskQueueException e )
{
throw new ArchivaException( "Unable to get repository scanning queue:" + e.getMessage(), e );
}
return CollectionUtils.exists( queue, new RepositoryTaskSelectionPredicate( repositoryId ) );
}
public boolean isProcessingDatabaseTask()
throws ArchivaException
{
List queue = null;
try
{
queue = databaseUpdateQueue.getQueueSnapshot();
}
catch ( TaskQueueException e )
{
throw new ArchivaException( "Unable to get database update queue:" + e.getMessage(), e );
}
return !queue.isEmpty();
}
public ArchivaTaskQueue getTaskQueue()
public void queueRepositoryTask( RepositoryTask task )
throws TaskQueueException
{
return (ArchivaTaskQueue) archivaTaskQueue;
repositoryScanningQueue.put( task );
}
public void queueDatabaseTask( DatabaseTask task )
throws TaskQueueException
{
databaseUpdateQueue.put( task );
}
}
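
The scheduler hands each job its TaskQueue through the Quartz JobDataMap. RepositoryTaskJob itself is not part of this diff, so the following is only a plausible sketch of the receiving side under Quartz 1.x; the key constants are redeclared here with assumed values:

import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.TaskQueueException;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class RepositoryTaskJobSketch
    implements Job
{
    // Hypothetical key values; the real constants live on RepositoryTaskJob.
    public static final String TASK_QUEUE = "TASK_QUEUE";
    public static final String TASK_REPOSITORY = "TASK_REPOSITORY";

    public void execute( JobExecutionContext context )
        throws JobExecutionException
    {
        // the scheduler stored the queue and repository id when building the job
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        TaskQueue queue = (TaskQueue) dataMap.get( TASK_QUEUE );
        RepositoryTask task = new RepositoryTask();
        task.setRepositoryId( (String) dataMap.get( TASK_REPOSITORY ) );
        try
        {
            queue.put( task ); // hand off to the queue's ThreadedTaskQueueExecutor
        }
        catch ( TaskQueueException e )
        {
            throw new JobExecutionException( e );
        }
    }
}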


@@ -0,0 +1,89 @@
package org.apache.maven.archiva.scheduled.executors;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
/**
* ArchivaDatabaseTaskExecutor
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component
* role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
* role-hint="database-update"
*/
public class ArchivaDatabaseUpdateTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor, Initializable
{
/**
* @plexus.requirement role-hint="jdo"
*/
private DatabaseUpdater databaseUpdater;
public void initialize()
throws InitializationException
{
getLogger().info( "Initialized " + this.getClass().getName() );
}
public void executeTask( Task task )
throws TaskExecutionException
{
DatabaseTask dbtask = (DatabaseTask) task;
getLogger().info( "Executing task from queue with job name: " + dbtask.getName() );
long time = System.currentTimeMillis();
try
{
getLogger().info( "Task: Updating unprocessed artifacts" );
databaseUpdater.updateAllUnprocessed();
}
catch ( ArchivaDatabaseException e )
{
throw new TaskExecutionException( "Error running unprocessed updater", e );
}
try
{
getLogger().info( "Task: Updating processed artifacts" );
databaseUpdater.updateAllProcessed();
}
catch ( ArchivaDatabaseException e )
{
throw new TaskExecutionException( "Error running processed updater", e );
}
time = System.currentTimeMillis() - time;
getLogger().info( "Finished database task in " + time + "ms." );
}
}
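
A sketch of driving the new executor directly, as ArchivaDatabaseUpdateTaskExecutorTest does further down; in the running webapp the "database-update" ThreadedTaskQueueExecutor drains the queue instead, so nothing calls executeTask() by hand. The setName() setter is assumed from the getName() call above:

import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;

public class DatabaseUpdateSketch
{
    public static void runOnce( TaskExecutor databaseUpdateExecutor )
        throws TaskExecutionException
    {
        DatabaseTask task = new DatabaseTask();
        task.setName( "database-task" );           // only used in the log line; setter assumed
        databaseUpdateExecutor.executeTask( task ); // updateAllUnprocessed + updateAllProcessed
    }
}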


@@ -20,19 +20,18 @@ package org.apache.maven.archiva.scheduled.executors;
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.utils.DateUtil;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
@@ -40,27 +39,24 @@ import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import java.util.List;
/**
* ArchivaRepositoryScanningTaskExecutor
*
* @author <a href="mailto:jmcconnell@apache.org">Jesse McConnell</a>
* @version $Id:$
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
* role-hint="archiva-task-executor"
* @plexus.component
* role="org.codehaus.plexus.taskqueue.execution.TaskExecutor"
* role-hint="repository-scanning"
*/
public class ArchivaScheduledTaskExecutor
public class ArchivaRepositoryScanningTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor
implements TaskExecutor, Initializable
{
/**
* @plexus.requirement role-hint="jdo"
*/
private ArchivaDAO dao;
/**
* @plexus.requirement role-hint="jdo"
*/
private DatabaseUpdater databaseUpdater;
/**
* @plexus.requirement role-hint="jdo"
*/
@@ -73,65 +69,21 @@ public class ArchivaScheduledTaskExecutor
*/
private RepositoryScanner repoScanner;
public void initialize()
throws InitializationException
{
getLogger().info( "Initialized " + this.getClass().getName() );
}
public void executeTask( Task task )
throws TaskExecutionException
{
if ( task instanceof DatabaseTask )
{
executeDatabaseTask( (DatabaseTask) task );
}
else if ( task instanceof RepositoryTask )
{
executeRepositoryTask( (RepositoryTask) task );
}
else
{
throw new TaskExecutionException( "Unknown Task: " + task.toString() );
}
}
private void executeDatabaseTask( DatabaseTask task )
throws TaskExecutionException
{
getLogger().info( "Executing task from queue with job name: " + task.getName() );
long time = System.currentTimeMillis();
RepositoryTask repoTask = (RepositoryTask) task;
getLogger().info( "Executing task from queue with job name: " + repoTask.getName() );
try
{
getLogger().info( "Task: Updating unprocessed artifacts" );
databaseUpdater.updateAllUnprocessed();
}
catch ( ArchivaDatabaseException e )
{
throw new TaskExecutionException( "Error running unprocessed updater", e );
}
try
{
getLogger().info( "Task: Updating processed artifacts" );
databaseUpdater.updateAllProcessed();
}
catch ( ArchivaDatabaseException e )
{
throw new TaskExecutionException( "Error running processed updater", e );
}
time = System.currentTimeMillis() - time;
getLogger().info( "Finished database task in " + time + "ms." );
}
private void executeRepositoryTask( RepositoryTask task )
throws TaskExecutionException
{
getLogger().info( "Executing task from queue with job name: " + task.getName() );
try
{
ArchivaRepository arepo = repositoryDAO.getRepository( task.getRepositoryId() );
ArchivaRepository arepo = repositoryDAO.getRepository( repoTask.getRepositoryId() );
long sinceWhen = RepositoryScanner.FRESH_SCAN;
@@ -147,7 +99,7 @@ public class ArchivaScheduledTaskExecutor
dao.save( stats );
getLogger().info( "Finished repository task: " + DateUtil.getDuration( stats.getDuration() ) + "." );
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );
}
catch ( ArchivaDatabaseException e )
{


@@ -0,0 +1,54 @@
package org.apache.maven.archiva.scheduled.tasks;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.collections.Predicate;
import org.apache.commons.lang.StringUtils;
/**
* RepositoryTaskSelectionPredicate
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryTaskSelectionPredicate
implements Predicate
{
private String repoid;
public RepositoryTaskSelectionPredicate( String repositoryId )
{
this.repoid = repositoryId;
}
public boolean evaluate( Object object )
{
boolean satisfies = false;
if ( object instanceof RepositoryTask )
{
RepositoryTask task = (RepositoryTask) object;
return StringUtils.equals( repoid, task.getRepositoryId() );
}
return satisfies;
}
}
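
A self-contained sketch of the predicate against a fake queue snapshot, matching how isProcessingRepositoryTask() in DefaultArchivaTaskScheduler uses it; the repository id is an example value:

import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;

import java.util.ArrayList;
import java.util.List;

public class QueueCheckSketch
{
    public static void main( String[] args )
    {
        List queueSnapshot = new ArrayList();
        RepositoryTask task = new RepositoryTask();
        task.setRepositoryId( "internal" ); // example repository id
        queueSnapshot.add( task );
        // exists() is how isProcessingRepositoryTask() interrogates the snapshot
        System.out.println( CollectionUtils.exists( queueSnapshot,
            new RepositoryTaskSelectionPredicate( "internal" ) ) ); // prints true
    }
}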


@@ -0,0 +1,104 @@
<?xml version="1.0" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<!--
|
| Database Update Task Queue / Executor
|
-->
<component>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
<role-hint>database-update</role-hint>
<implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
<lifecycle-handler>plexus-configurable</lifecycle-handler>
<configuration>
<task-entry-evaluators>
</task-entry-evaluators>
<task-exit-evaluators>
</task-exit-evaluators>
<task-viability-evaluators>
</task-viability-evaluators>
</configuration>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
<role-hint>database-update</role-hint>
<implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
<instantiation-strategy>singleton</instantiation-strategy>
<requirements>
<requirement>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>database-update</role-hint>
</requirement>
<requirement>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
<role-hint>database-update</role-hint>
</requirement>
</requirements>
<configuration>
<name>database-update</name>
</configuration>
</component>
<!--
|
| Repository Scanning Task Queue / Executor
|
-->
<component>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
<role-hint>repository-scanning</role-hint>
<implementation>org.codehaus.plexus.taskqueue.DefaultTaskQueue</implementation>
<lifecycle-handler>plexus-configurable</lifecycle-handler>
<configuration>
<task-entry-evaluators>
</task-entry-evaluators>
<task-exit-evaluators>
</task-exit-evaluators>
<task-viability-evaluators>
</task-viability-evaluators>
</configuration>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
<role-hint>repository-scanning</role-hint>
<implementation>org.codehaus.plexus.taskqueue.execution.ThreadedTaskQueueExecutor</implementation>
<instantiation-strategy>singleton</instantiation-strategy>
<requirements>
<requirement>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>repository-scanning</role-hint>
</requirement>
<requirement>
<role>org.codehaus.plexus.taskqueue.TaskQueue</role>
<role-hint>repository-scanning</role-hint>
</requirement>
</requirements>
<configuration>
<name>repository-scanning</name>
</configuration>
</component>
</components>
</component-set>
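
The queue and executor components above are paired purely by matching role-hints, once for database-update and once for repository-scanning. A sketch of verifying that wiring from a PlexusTestCase, assuming this descriptor is on the test classpath:

import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.taskqueue.TaskQueue;
import org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor;

public class QueueWiringSketch
    extends PlexusTestCase
{
    public void testWiring()
        throws Exception
    {
        // queue and executor are paired by role-hint; both lookups must resolve
        TaskQueue queue = (TaskQueue) lookup( TaskQueue.class.getName(), "repository-scanning" );
        TaskQueueExecutor executor =
            (TaskQueueExecutor) lookup( TaskQueueExecutor.class.getName(), "repository-scanning" );
        assertNotNull( queue );
        assertNotNull( executor );
    }
}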


@@ -147,19 +147,19 @@
</patterns>
</fileType>
</fileTypes>
<goodConsumers>
<goodConsumer>update-db-artifact</goodConsumer>
<goodConsumer>create-missing-checksums</goodConsumer>
<goodConsumer>update-db-repository-metadata</goodConsumer>
<goodConsumer>validate-checksum</goodConsumer>
<goodConsumer>validate-signature</goodConsumer>
<goodConsumer>index-content</goodConsumer>
<goodConsumer>auto-remove</goodConsumer>
<goodConsumer>auto-rename</goodConsumer>
</goodConsumers>
<badConsumers>
<badConsumer>update-db-bad-content</badConsumer>
</badConsumers>
<knownContentConsumers>
<knownContentConsumer>update-db-artifact</knownContentConsumer>
<knownContentConsumer>create-missing-checksums</knownContentConsumer>
<knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
<knownContentConsumer>validate-checksum</knownContentConsumer>
<knownContentConsumer>validate-signature</knownContentConsumer>
<knownContentConsumer>index-content</knownContentConsumer>
<knownContentConsumer>auto-remove</knownContentConsumer>
<knownContentConsumer>auto-rename</knownContentConsumer>
</knownContentConsumers>
<invalidContentConsumers>
<invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
</invalidContentConsumers>
</repositoryScanning>
<databaseScanning>
@@ -167,11 +167,11 @@
<unprocessedConsumers>
<unprocessedConsumer>update-db-artifact</unprocessedConsumer>
</unprocessedConsumers>
<processedConsumers>
<processedConsumer>not-present-remove-db-artifact</processedConsumer>
<processedConsumer>not-present-remove-db-project</processedConsumer>
<processedConsumer>not-present-remove-indexed</processedConsumer>
</processedConsumers>
<cleanupConsumers>
<cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
<cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
<cleanupConsumer>not-present-remove-indexed</cleanupConsumer>
</cleanupConsumers>
</databaseScanning>
</configuration>


@@ -19,8 +19,23 @@ package org.apache.maven.archiva.scheduled.executors;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.jpox.SchemaTool;
import java.io.File;
import java.net.URL;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -30,28 +45,13 @@ import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.jpox.SchemaTool;
/**
* IndexerTaskExecutorTest
* ArchivaDatabaseUpdateTaskExecutorTest
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id:$
*/
public class ArchivaScheduledTaskExecutorTest
public class ArchivaDatabaseUpdateTaskExecutorTest
extends PlexusTestCase
{
private TaskExecutor taskExecutor;
@@ -137,7 +137,7 @@ public class ArchivaScheduledTaskExecutorTest
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class.getName(), "test-archiva-task-executor" );
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-database-update" );
}
public void testExecutor() throws Exception
@@ -150,7 +150,6 @@ public class ArchivaScheduledTaskExecutorTest
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
// Create it
ArchivaRepository repo =
repoDao.createRepository( "testRepo", "Test Repository", repoUri );
@@ -166,15 +165,16 @@
assertNotNull( repoSaved.getModel() );
assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );
RepositoryTask repoTask = new RepositoryTask();
repoTask.setName( "testRepoTask" );
repoTask.setRepositoryId( "testRepo" );
taskExecutor.executeTask( repoTask );
ArtifactDAO adao = dao.getArtifactDAO();
ArchivaArtifact sqlArtifact = adao.createArtifact( "javax.sql", "jdbc", "2.0", "", "jar" );
sqlArtifact.getModel().setLastModified( new Date() );
sqlArtifact.getModel().setSize( 1234 );
sqlArtifact.getModel().setOrigin( "testcase" );
sqlArtifact.getModel().setWhenProcessed( null );
adao.saveArtifact( sqlArtifact );
ArchivaArtifact artifact = adao.getArtifact( "javax.sql", "jdbc", "2.0", null, "jar" );
assertNotNull( artifact );
@@ -182,7 +182,7 @@
List unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
assertNotNull( unprocessedResultList );
assertEquals("Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
assertEquals("Incorrect number of unprocessed artifacts detected.", 1, unprocessedResultList.size() );
DatabaseTask dataTask = new DatabaseTask();
@@ -193,7 +193,7 @@
List processedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( true ) );
assertNotNull( processedResultList );
assertEquals("Incorrect number of processed artifacts detected.", 8, processedResultList.size() );
assertEquals("Incorrect number of processed artifacts detected.", 1, processedResultList.size() );
}


@@ -0,0 +1,180 @@
package org.apache.maven.archiva.scheduled.executors;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.jpox.SchemaTool;
import java.io.File;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
/**
* ArchivaRepositoryScanningTaskExecutorTest
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class ArchivaRepositoryScanningTaskExecutorTest
extends PlexusTestCase
{
private TaskExecutor taskExecutor;
protected ArchivaDAO dao;
protected void setUp()
throws Exception
{
super.setUp();
DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
/* derby version
File derbyDbDir = new File( "target/plexus-home/testdb" );
if ( derbyDbDir.exists() )
{
FileUtils.deleteDirectory( derbyDbDir );
}
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
*/
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
// jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateTables", "true" );
jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
Properties properties = jdoFactory.getProperties();
for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
{
Map.Entry entry = (Map.Entry) it.next();
System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
}
URL jdoFileUrls[] = new URL[] { getClass()
.getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{
fail( "Unable to process test " + getName() + " - missing package.jdo." );
}
File propsFile = null; // intentional
boolean verbose = true;
SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
assertNotNull( pmf );
PersistenceManager pm = pmf.getPersistenceManager();
pm.close();
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
}
public void testExecutor() throws Exception
{
RepositoryDAO repoDao = dao.getRepositoryDAO();
File repoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
// Create it
ArchivaRepository repo =
repoDao.createRepository( "testRepo", "Test Repository", repoUri );
assertNotNull( repo );
// Set some mandatory values
repo.getModel().setCreationSource( "Test Case" );
repo.getModel().setLayoutName( "default" );
// Save it.
ArchivaRepository repoSaved = repoDao.saveRepository( repo );
assertNotNull( repoSaved );
assertNotNull( repoSaved.getModel() );
assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );
RepositoryTask repoTask = new RepositoryTask();
repoTask.setName( "testRepoTask" );
repoTask.setRepositoryId( "testRepo" );
taskExecutor.executeTask( repoTask );
ArtifactDAO adao = dao.getArtifactDAO();
List unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
assertNotNull( unprocessedResultList );
assertEquals("Incorrect number of unprocessed artifacts detected.", 8, unprocessedResultList.size() );
}
}


@@ -0,0 +1 @@
d41d8cd98f00b204e9800998ecf8427e jdbc-2.0.jar


@@ -0,0 +1 @@
da39a3ee5e6b4b0d3255bfef95601890afd80709 jdbc-2.0.jar


@@ -0,0 +1 @@
bc479af1df809dbabb92e29548776b84 A-1.0.pom


@@ -0,0 +1 @@
fbb4c97603c64f3915c88243e1ea49f1a238afa7 A-1.0.pom


@@ -0,0 +1 @@
fd4275a6811332d67075ffd879d13d4a A-1.0.war


@@ -0,0 +1 @@
754133cd9c36adef86d35b96c0e96e11a9c6bfc9 A-1.0.war


@@ -0,0 +1 @@
c543ad342d1de7a4352fc9b0f42067b8 B-1.0.pom


@@ -0,0 +1 @@
be06d04d5824859253abf423394dc85d24971ba8 B-1.0.pom


@@ -0,0 +1 @@
1af6c812f02f24e1ba287647a6856cd5 B-2.0.pom


@@ -0,0 +1 @@
5d49f821499ab061c97457b3e6512fd1624a3033 B-2.0.pom


@@ -0,0 +1 @@
90f5c062bded5f794cd4ea9479b35173 C-1.0.pom


@@ -0,0 +1 @@
fcefa8220d30b7aa72a1b7422cc06336ca14bb6f C-1.0.pom


@@ -0,0 +1 @@
fd4275a6811332d67075ffd879d13d4a C-1.0.war


@@ -0,0 +1 @@
754133cd9c36adef86d35b96c0e96e11a9c6bfc9 C-1.0.war


@@ -0,0 +1 @@
764dd493029133aff4c0f7cb4be2d9b7 discovery-1.0.pom


@@ -0,0 +1 @@
a7aaf680caaf5bb971753e047c439f3fd4efa473 discovery-1.0.pom


@@ -0,0 +1,78 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>test-database-update</role-hint>
<implementation>org.apache.maven.archiva.scheduled.executors.ArchivaDatabaseUpdateTaskExecutor</implementation>
<description></description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.updater.DatabaseUpdater</role>
<role-hint>jdo</role-hint>
<field-name>databaseUpdater</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
</otherProperties>
</configuration>
</component>
</components>
</component-set>


@@ -22,19 +22,14 @@
<components>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>test-archiva-task-executor</role-hint>
<implementation>org.apache.maven.archiva.scheduled.executors.ArchivaScheduledTaskExecutor</implementation>
<role-hint>test-repository-scanning</role-hint>
<implementation>org.apache.maven.archiva.scheduled.executors.ArchivaRepositoryScanningTaskExecutor</implementation>
<description></description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<field-name>archivaConfiguration</field-name>
<role-hint>test-configuration</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.updater.DatabaseUpdater</role>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<field-name>databaseUpdater</field-name>
<field-name>dao</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
@@ -42,19 +37,14 @@
<field-name>repositoryDAO</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.consumers.ArchivaArtifactConsumer</role>
<field-name>availableDBConsumers</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.consumers.RepositoryContentConsumer</role>
<field-name>availableRepositoryConsumers</field-name>
<role>org.apache.maven.archiva.repository.scanner.RepositoryScanner</role>
<field-name>repoScanner</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>


@@ -1 +0,0 @@
<configuration />


@@ -20,6 +20,7 @@ package org.apache.maven.archiva.web.action.admin;
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.DefaultArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
@@ -63,27 +64,35 @@ public class IndexRepositoryAction
boolean scheduleTask = false;
if ( taskScheduler.getTaskQueue().hasFilesystemTaskInQueue() )
try
{
if ( taskScheduler.getTaskQueue().hasRepositoryTaskInQueue( repoid ) )
if ( taskScheduler.isProcessingAnyRepositoryTask() )
{
addActionError( "Repository [" + repoid + "] task was already queued." );
if ( taskScheduler.isProcessingRepositoryTask( repoid ) )
{
addActionError( "Repository [" + repoid + "] task was already queued." );
}
else
{
scheduleTask = true;
}
}
else
{
scheduleTask = true;
}
}
else
catch ( ArchivaException e )
{
scheduleTask = true;
scheduleTask = false;
addActionError( e.getMessage() );
}
if ( scheduleTask )
{
try
{
taskScheduler.getTaskQueue().put( task );
taskScheduler.queueRepositoryTask( task );
addActionMessage( "Your request to have repository [" + repoid + "] be indexed has been queued." );
}
catch ( TaskQueueException e )
@@ -97,6 +106,18 @@ public class IndexRepositoryAction
return SUCCESS;
}
public void addActionMessage( String aMessage )
{
super.addActionMessage( aMessage );
getLogger().info( "[ActionMessage] " + aMessage );
}
public void addActionError( String anErrorMessage )
{
super.addActionError( anErrorMessage );
getLogger().warn( "[ActionError] " + anErrorMessage );
}
public SecureActionBundle getSecureActionBundle()
throws SecureActionException
{


@@ -19,8 +19,30 @@
-->
<plexus>
<components>
<load-on-start>
<component>
<role>org.apache.maven.archiva.configuration.ConfigurationUpgrade</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
<role-hint>database-update</role-hint>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskQueueExecutor</role>
<role-hint>repository-scanning</role-hint>
</component>
</load-on-start>
<components>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
@@ -194,29 +216,6 @@
</component>
</components>
<load-on-start>
<component>
<role>org.apache.maven.archiva.configuration.ConfigurationUpgrade</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.apache.maven.archiva.scheduled.ArchivaTaskScheduler</role>
<role-hint>default</role-hint>
</component>
<component>
<role>org.codehaus.plexus.taskqueue.execution.TaskExecutor</role>
<role-hint>archiva-task-executor</role-hint>
</component>
</load-on-start>
<lifecycle-handler-manager implementation="org.codehaus.plexus.lifecycle.DefaultLifecycleHandlerManager">
<default-lifecycle-handler-id>plexus</default-lifecycle-handler-id>
<lifecycle-handlers>