mirror of https://github.com/apache/archiva.git
getting closer to some meaningful scheduler tests
git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches/archiva-jpox-database-refactor@528036 13f79535-47bb-0310-9956-ffa450edef68
parent 9dd8cbb2b2
commit 0f1f2c658e
@@ -37,6 +37,22 @@
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-database</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-repository-layer</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-core-consumers</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-database-consumers</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-lucene-consumers</artifactId>
    </dependency>
    <dependency>
      <groupId>org.codehaus.plexus</groupId>
      <artifactId>plexus-taskqueue</artifactId>
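The plexus-taskqueue dependency added above supplies the Task/TaskExecutor contract that the scheduler executor in this commit implements. As a point of reference, a minimal executor against that contract might look like the sketch below (the class name is illustrative, and the org.codehaus.plexus.taskqueue.Task import is assumed from the packages used elsewhere in this diff):

import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;

// Sketch only, not part of this commit: the smallest possible TaskExecutor.
// ArchivaScheduledTaskExecutor below fills executeTask() with real scanning work.
public class NoopTaskExecutor
    implements TaskExecutor
{
    public void executeTask( Task task )
        throws TaskExecutionException
    {
        // a real executor inspects the task (e.g. a RepositoryTask) and acts on it
    }
}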
@@ -71,11 +87,7 @@
      <version>1.0-alpha-2</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.maven.archiva</groupId>
      <artifactId>archiva-core-consumers</artifactId>
      <scope>test</scope>
    </dependency>

  </dependencies>
  <build>
    <plugins>
@@ -22,7 +22,11 @@ package org.apache.maven.archiva.scheduled.executors;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
@@ -35,6 +39,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 *
@@ -58,6 +63,17 @@ public class ArchivaScheduledTaskExecutor extends AbstractLogEnabled implements
     */
    private DatabaseUpdater databaseUpdater;

    /**
     * @plexus.requirement role-hint="jdo"
     */
    private RepositoryDAO repositoryDAO;

    /**
     * The collection of available consumers.
     * @plexus.requirement role="org.apache.maven.archiva.consumers.ArchivaArtifactConsumer"
     */
    private Map availableConsumers;

    public void executeTask( Task task ) throws TaskExecutionException
    {
@@ -112,10 +128,67 @@ public class ArchivaScheduledTaskExecutor extends AbstractLogEnabled implements

        long time = System.currentTimeMillis();

        // insert repository scanning codelets here
        try
        {
            ArchivaRepository arepo = repositoryDAO.getRepository( task.getRepositoryId() );

            RepositoryScanner scanner = new RepositoryScanner();

            scanner.scan( arepo, getActiveConsumerList(), true );
        }
        catch ( ArchivaDatabaseException e )
        {
            throw new TaskExecutionException( "Database error when executing repository job.", e );
        }
        catch ( RepositoryException e )
        {
            throw new TaskExecutionException( "Repository error when executing repository job.", e );
        }

        time = System.currentTimeMillis() - time;

        getLogger().info( "Finished repository task in " + time + "ms." );
    }

    private List getActiveConsumerList()
    {
        List activeConsumers = new ArrayList();

        List configuredGoodConsumers = archivaConfiguration.getConfiguration().getRepositoryScanning().getGoodConsumers();
        List configuredBadConsumers = archivaConfiguration.getConfiguration().getRepositoryScanning().getBadConsumers();

        for ( Iterator i = configuredGoodConsumers.iterator(); i.hasNext(); )
        {
            String consumer = (String) i.next();

            if ( availableConsumers.containsKey( consumer ) )
            {
                activeConsumers.add( availableConsumers.get( consumer ) );
            }
            else
            {
                getLogger().warn( "Requested consumer [" + consumer + "] does not exist. Skipping in repository scan." );
            }
        }

        for ( Iterator i = configuredBadConsumers.iterator(); i.hasNext(); )
        {
            String consumer = (String) i.next();

            if ( availableConsumers.containsKey( consumer ) )
            {
                activeConsumers.add( availableConsumers.get( consumer ) );
            }
            else
            {
                getLogger().warn( "Requested consumer [" + consumer + "] does not exist. Skipping in repository scan." );
            }
        }

        return activeConsumers;
    }
}
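The two consumer-selection loops above differ only in the list they read; a possible consolidation, shown purely as an editorial sketch (the helper name is made up and not part of this commit):

// Hypothetical helper inside ArchivaScheduledTaskExecutor: resolve configured
// consumer ids against the injected availableConsumers map.
private void addActiveConsumers( List configuredConsumers, List activeConsumers )
{
    for ( Iterator i = configuredConsumers.iterator(); i.hasNext(); )
    {
        String consumer = (String) i.next();

        if ( availableConsumers.containsKey( consumer ) )
        {
            activeConsumers.add( availableConsumers.get( consumer ) );
        }
        else
        {
            getLogger().warn( "Requested consumer [" + consumer + "] does not exist. Skipping in repository scan." );
        }
    }
}

getActiveConsumerList() could then call this once for the good-consumer list and once for the bad-consumer list.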
@@ -22,12 +22,27 @@ package org.apache.maven.archiva.scheduled.executors;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.RepositoryDAO;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.jpox.SchemaTool;

import java.io.File;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;

import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;

/**
 * ArchivaScheduledTaskExecutorTest
@@ -40,26 +55,115 @@ public class ArchivaScheduledTaskExecutorTest
{
    private TaskExecutor taskExecutor;

    protected ArchivaDAO dao;

    protected void setUp()
        throws Exception
    {
        super.setUp();
        DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
        assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );

        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );

        /* derby version
        File derbyDbDir = new File( "target/plexus-home/testdb" );
        if ( derbyDbDir.exists() )
        {
            FileUtils.deleteDirectory( derbyDbDir );
        }

        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
        */

        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );

        jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
        jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );

        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );

        jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
        jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );

        // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );

        jdoFactory.setProperty( "org.jpox.validateTables", "true" );
        jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
        jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );

        Properties properties = jdoFactory.getProperties();

        for ( Iterator it = properties.entrySet().iterator(); it.hasNext(); )
        {
            Map.Entry entry = (Map.Entry) it.next();

            System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
        }

        URL jdoFileUrls[] = new URL[] { getClass()
            .getResource( "/org/apache/maven/archiva/model/package.jdo" ) };

        if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
        {
            fail( "Unable to process test " + getName() + " - missing package.jdo." );
        }

        File propsFile = null; // intentional
        boolean verbose = true;

        SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
        SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );

        PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();

        assertNotNull( pmf );

        PersistenceManager pm = pmf.getPersistenceManager();

        pm.close();

        this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );

        taskExecutor = (TaskExecutor) lookup( TaskExecutor.class.getName(), "test-archiva-task-executor" );
    }

    public void testExecutor()
        throws TaskExecutionException
    public void testExecutor() throws Exception
    {
        taskExecutor.executeTask( new TestRepositoryTask() );
        RepositoryDAO repoDao = dao.getRepositoryDAO();

        // Create it
        ArchivaRepository repo =
            repoDao.createRepository( "testRepo", "Test Repository", "http://localhost:8080/repository/foo" );
        assertNotNull( repo );

        // Set some mandatory values
        repo.getModel().setCreationSource( "Test Case" );
        repo.getModel().setLayoutName( "default" );

        // Save it.
        ArchivaRepository repoSaved = repoDao.saveRepository( repo );
        assertNotNull( repoSaved );
        assertNotNull( repoSaved.getModel() );
        assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );

        RepositoryTask repoTask = new RepositoryTask();

        repoTask.setName( "testTask" );
        repoTask.setRepositoryId( "testRepo" );

        taskExecutor.executeTask( repoTask );
    }

    class TestRepositoryTask
        extends RepositoryTask
    {
        public String getName()
        {
            return "TestRepositoryTask";
        }
    }
}
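A check that could naturally follow the executeTask() call above, reading the repository back through the same DAO the executor uses (a sketch only; getLayoutName() is assumed to exist as the counterpart of the setLayoutName() call in the test):

// Hypothetical follow-up assertions, not part of this commit.
ArchivaRepository fetched = repoDao.getRepository( "testRepo" );
assertNotNull( fetched );
assertEquals( "default", fetched.getModel().getLayoutName() );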
@@ -36,6 +36,15 @@
          <role-hint>jdo</role-hint>
          <field-name>databaseUpdater</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.database.RepositoryDAO</role>
          <role-hint>jdo</role-hint>
          <field-name>repositoryDAO</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.consumers.ArchivaArtifactConsumer</role>
          <field-name>availableConsumers</field-name>
        </requirement>
      </requirements>
    </component>
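The availableConsumers requirement above is declared against a role with no role-hint and is bound to a Map field in the executor; the working assumption (about Plexus container behaviour, not something this diff states) is that every ArchivaArtifactConsumer implementation gets injected keyed by its role-hint, which is what lets getActiveConsumerList() look consumers up by the ids from the repository-scanning configuration. A toy illustration of that lookup shape, with a made-up consumer id:

import java.util.HashMap;
import java.util.Map;

// Sketch only: mimics the assumed shape of the injected map; not Archiva code.
public class AvailableConsumersSketch
{
    public static void main( String[] args )
    {
        Map availableConsumers = new HashMap();
        availableConsumers.put( "sample-consumer", new Object() ); // stand-in for an ArchivaArtifactConsumer

        String configuredId = "sample-consumer";
        if ( availableConsumers.containsKey( configuredId ) )
        {
            System.out.println( "active consumer: " + availableConsumers.get( configuredId ) );
        }
    }
}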