[MRM-294]

- Changed DefaultCleanupReleasedSnapshots to CleanupReleasedSnapshotsRepositoryPurge
- Removed unneeded fields in RepositoryPurgeConsumer
- Added constructor for DaysOldRepositoryPurge, RetentionCountRepositoryPurge and CleanupReleasedSnapshotsRepositoryPurge
- Updated test classes and test resources



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@563064 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Maria Odea B. Ching 2007-08-06 08:22:35 +00:00
parent 45ad3a396b
commit 7db1bd8f9d
11 changed files with 279 additions and 676 deletions

View File

@ -19,7 +19,6 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License. * under the License.
*/ */
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepository; import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.layout.FilenameParts; import org.apache.maven.archiva.repository.layout.FilenameParts;
@ -28,29 +27,33 @@ import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout; import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.database.ArchivaDatabaseException; import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO; import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File; import java.io.File;
import java.io.FilenameFilter; import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
/** /**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a> * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/ */
public abstract class AbstractRepositoryPurge public abstract class AbstractRepositoryPurge
implements RepositoryPurge implements RepositoryPurge
{ {
private ArchivaRepository repository; protected ArchivaRepository repository;
private BidirectionalRepositoryLayout layout; protected BidirectionalRepositoryLayout layout;
private RepositoryContentIndex index; protected ArtifactDAO artifactDao;
private ArtifactDAO artifactDao; public AbstractRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao )
{
this.repository = repository;
this.layout = layout;
this.artifactDao = artifactDao;
}
/** /**
* Get all files from the directory that matches the specified filename. * Get all files from the directory that matches the specified filename.
@ -60,23 +63,14 @@ public abstract class AbstractRepositoryPurge
* @return * @return
*/ */
protected File[] getFiles( File dir, String filename ) protected File[] getFiles( File dir, String filename )
throws RepositoryPurgeException
{ {
FilenameFilter filter = new ArtifactFilenameFilter( filename ); FilenameFilter filter = new ArtifactFilenameFilter( filename );
if ( !dir.isDirectory() )
{
System.out.println( "File is not a directory." );
}
File[] files = dir.listFiles( filter ); File[] files = dir.listFiles( filter );
return files; return files;
} }
public abstract void process( String path, Configuration configuration )
throws RepositoryPurgeException;
/** /**
* Purge the repo. Update db and index of removed artifacts. * Purge the repo. Update db and index of removed artifacts.
* *
@ -84,49 +78,43 @@ public abstract class AbstractRepositoryPurge
* @throws RepositoryIndexException * @throws RepositoryIndexException
*/ */
protected void purge( File[] artifactFiles ) protected void purge( File[] artifactFiles )
throws RepositoryIndexException
{ {
List records = new ArrayList();
for ( int i = 0; i < artifactFiles.length; i++ ) for ( int i = 0; i < artifactFiles.length; i++ )
{ {
artifactFiles[i].delete(); artifactFiles[i].delete();
String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( getRepository().getUrl().getPath() ); String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( repository.getUrl().getPath() );
String artifactPath = artifactPathParts[artifactPathParts.length - 1]; String artifactPath = artifactPathParts[artifactPathParts.length - 1];
if ( !artifactPath.toUpperCase().endsWith( "SHA1" ) && !artifactPath.toUpperCase().endsWith( "MD5" ) ) if ( !artifactPath.toUpperCase().endsWith( "SHA1" ) && !artifactPath.toUpperCase().endsWith( "MD5" ) )
{ {
updateDatabase( artifactPath ); // intended to be swallowed
// continue updating the database for all artifacts
try
{
updateDatabase( artifactPath );
}
catch ( ArchivaDatabaseException ae )
{
//@todo determine logging to be used
}
catch ( LayoutException le )
{
}
} }
FileContentRecord record = new FileContentRecord();
record.setRepositoryId( this.repository.getId() );
record.setFilename( artifactPath );
records.add( record );
} }
//index.deleteRecords( records );
} }
private void updateDatabase( String path ) private void updateDatabase( String path )
throws ArchivaDatabaseException, LayoutException
{ {
try
{
ArchivaArtifact artifact = layout.toArtifact( path );
ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getClassifier(),
artifact.getType() );
artifactDao.deleteArtifact( queriedArtifact );
}
catch ( ArchivaDatabaseException ae )
{
} ArchivaArtifact artifact = layout.toArtifact( path );
catch ( LayoutException le ) ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
{ artifact.getVersion(), artifact.getClassifier(),
artifact.getType() );
} artifactDao.deleteArtifact( queriedArtifact );
} }
/** /**
@ -147,34 +135,4 @@ public abstract class AbstractRepositoryPurge
return parts; return parts;
} }
public void setRepository( ArchivaRepository repository )
{
this.repository = repository;
}
public void setLayout( BidirectionalRepositoryLayout layout )
{
this.layout = layout;
}
public void setIndex( RepositoryContentIndex index )
{
this.index = index;
}
public void setArtifactDao( ArtifactDAO artifactDao )
{
this.artifactDao = artifactDao;
}
protected ArchivaRepository getRepository()
{
return repository;
}
protected BidirectionalRepositoryLayout getLayout()
{
return layout;
}
} }

View File

@ -19,49 +19,56 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License. * under the License.
*/ */
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader; import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter; import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException; import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.common.utils.VersionUtil; import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.common.utils.VersionComparator; import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.Iterator;
import java.util.Date; import java.util.Date;
import java.util.ArrayList;
import java.util.Iterator;
/** /**
* M2 implementation for cleaning up the released snapshots. * M2 implementation for cleaning up the released snapshots.
* *
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a> * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/ */
public class DefaultCleanupReleasedSnapshots public class CleanupReleasedSnapshotsRepositoryPurge
extends AbstractRepositoryPurge extends AbstractRepositoryPurge
{ {
public static final String SNAPSHOT = "-SNAPSHOT"; public static final String SNAPSHOT = "-SNAPSHOT";
private RepositoryMetadataReader metadataReader; private RepositoryMetadataReader metadataReader;
public DefaultCleanupReleasedSnapshots() public CleanupReleasedSnapshotsRepositoryPurge( ArchivaRepository repository, BidirectionalRepositoryLayout layout,
ArtifactDAO artifactDao )
{ {
metadataReader = new RepositoryMetadataReader(); super( repository, layout, artifactDao );
metadataReader = new RepositoryMetadataReader();
} }
public void process( String path, Configuration configuration ) public void process( String path )
throws RepositoryPurgeException throws RepositoryPurgeException
{ {
try try
{ {
File artifactFile = new File( getRepository().getUrl().getPath(), path ); File artifactFile = new File( repository.getUrl().getPath(), path );
if ( !artifactFile.exists() ) if ( !artifactFile.exists() )
{ {
@ -93,7 +100,7 @@ public class DefaultCleanupReleasedSnapshots
FileUtils.deleteDirectory( versionDir ); FileUtils.deleteDirectory( versionDir );
updated = true; updated = true;
break; break;
} }
} }
@ -112,10 +119,6 @@ public class DefaultCleanupReleasedSnapshots
{ {
throw new RepositoryPurgeException( ie.getMessage() ); throw new RepositoryPurgeException( ie.getMessage() );
} }
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
} }
private void updateMetadata( File artifactIdDir ) private void updateMetadata( File artifactIdDir )
@ -147,7 +150,8 @@ public class DefaultCleanupReleasedSnapshots
} }
catch ( RepositoryMetadataException rme ) catch ( RepositoryMetadataException rme )
{ {
System.out.println( "Error updating metadata " + metadataFiles[i].getAbsoluteFile() ); // continue updating other metadata files even if there is an exception
// @todo log to console
} }
} }
} }
@ -186,5 +190,4 @@ public class DefaultCleanupReleasedSnapshots
return versions; return versions;
} }
} }

View File

@ -21,31 +21,43 @@ package org.apache.maven.archiva.consumers.core.repository;
import org.apache.maven.archiva.repository.layout.FilenameParts; import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException; import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil; import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.RepositoryConfiguration; import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.configuration.Configuration; import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import java.util.Calendar; import java.util.Calendar;
import java.util.GregorianCalendar; import java.util.GregorianCalendar;
import java.io.File; import java.io.File;
/** /**
* Purge repository for snapshots older than the specified days in the repository configuration.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a> * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge" * @version
* role-hint="days-old"
* instantiation-strategy="per-lookup"
*/ */
public class DaysOldRepositoryPurge public class DaysOldRepositoryPurge
extends AbstractRepositoryPurge extends AbstractRepositoryPurge
{ {
private RepositoryConfiguration repoConfig;
public void process( String path, Configuration configuration ) public DaysOldRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
RepositoryConfiguration repoConfig)
{
super( repository, layout, artifactDao );
this.repoConfig = repoConfig;
}
public void process( String path )
throws RepositoryPurgeException throws RepositoryPurgeException
{ {
try try
{ {
File artifactFile = new File( getRepository().getUrl().getPath(), path ); File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() ) if( !artifactFile.exists() )
{ {
@ -56,9 +68,7 @@ public class DaysOldRepositoryPurge
if ( VersionUtil.isSnapshot( parts.version ) ) if ( VersionUtil.isSnapshot( parts.version ) )
{ {
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() ); Calendar olderThanThisDate = Calendar.getInstance();
Calendar olderThanThisDate = new GregorianCalendar();
olderThanThisDate.add( Calendar.DATE, ( -1 * repoConfig.getDaysOlder() ) ); olderThanThisDate.add( Calendar.DATE, ( -1 * repoConfig.getDaysOlder() ) );
if ( artifactFile.lastModified() < olderThanThisDate.getTimeInMillis() ) if ( artifactFile.lastModified() < olderThanThisDate.getTimeInMillis() )
@ -75,10 +85,6 @@ public class DaysOldRepositoryPurge
{ {
throw new RepositoryPurgeException( le.getMessage() ); throw new RepositoryPurgeException( le.getMessage() );
} }
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
} }
} }

View File

@ -37,37 +37,8 @@ public interface RepositoryPurge
* Perform checking on artifact for repository purge * Perform checking on artifact for repository purge
* *
* @param path path to the scanned artifact * @param path path to the scanned artifact
* @param configuration the configuration for the repository currently being scanned
*/ */
public void process( String path, Configuration configuration ) public void process( String path )
throws RepositoryPurgeException; throws RepositoryPurgeException;
/**
* Set the repository to be purged
*
* @param repository
*/
public void setRepository( ArchivaRepository repository );
/**
* Set the layout of the repository to be purged
*
* @param layout
*/
public void setLayout( BidirectionalRepositoryLayout layout );
/**
* Set the index of the repository
*
* @param index
*/
public void setIndex( RepositoryContentIndex index );
/**
* Set the artifact dao used for updating the database of the changes in the repo
*
* @param artifactDao
*/
public void setArtifactDao( ArtifactDAO artifactDao );
} }

View File

@ -73,15 +73,10 @@ public class RepositoryPurgeConsumer
*/ */
private BidirectionalRepositoryLayoutFactory layoutFactory; private BidirectionalRepositoryLayoutFactory layoutFactory;
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
private ArchivaDAO dao; private ArchivaDAO dao;
/** /**
* @plexus.requirement * @plexus.requirement
@ -90,16 +85,12 @@ public class RepositoryPurgeConsumer
private ArchivaRepository repository; private ArchivaRepository repository;
private BidirectionalRepositoryLayout repositoryLayout;
private List includes = new ArrayList(); private List includes = new ArrayList();
private List propertyNameTriggers = new ArrayList(); private List propertyNameTriggers = new ArrayList();
private RepositoryPurge repoPurge; private RepositoryPurge repoPurge;
private RepositoryContentIndex index;
private RepositoryPurge cleanUp; private RepositoryPurge cleanUp;
public String getId() public String getId()
@ -130,17 +121,18 @@ public class RepositoryPurgeConsumer
public void beginScan( ArchivaRepository repository ) public void beginScan( ArchivaRepository repository )
throws ConsumerException throws ConsumerException
{ {
BidirectionalRepositoryLayout repositoryLayout = null;
if ( !repository.isManaged() ) if ( !repository.isManaged() )
{ {
throw new ConsumerException( "Consumer requires managed repository." ); throw new ConsumerException( "Consumer requires managed repository." );
} }
this.repository = repository; this.repository = repository;
this.index = indexFactory.createFileContentIndex( repository );
try try
{ {
this.repositoryLayout = layoutFactory.getLayout( this.repository.getLayoutType() ); repositoryLayout = layoutFactory.getLayout( repository.getLayoutType() );
} }
catch ( LayoutException e ) catch ( LayoutException e )
{ {
@ -148,27 +140,18 @@ public class RepositoryPurgeConsumer
"Unable to initialize consumer due to unknown repository layout: " + e.getMessage(), e ); "Unable to initialize consumer due to unknown repository layout: " + e.getMessage(), e );
} }
// @todo handle better injection of RepositoryPurge
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() ); RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.getDaysOlder() != 0 ) if ( repoConfig.getDaysOlder() != 0 )
{ {
repoPurge = new DaysOldRepositoryPurge(); repoPurge = new DaysOldRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
} }
else else
{ {
repoPurge = new RetentionCountRepositoryPurge(); repoPurge =
new RetentionCountRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
} }
repoPurge.setLayout( repositoryLayout );
repoPurge.setRepository( repository );
repoPurge.setIndex( index );
repoPurge.setArtifactDao( dao.getArtifactDAO() );
cleanUp = new DefaultCleanupReleasedSnapshots(); cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO() );
cleanUp.setRepository( repository );
cleanUp.setLayout( repositoryLayout );
cleanUp.setArtifactDao( dao.getArtifactDAO() );
cleanUp.setIndex( index );
} }
public void processFile( String path ) public void processFile( String path )
@ -176,13 +159,14 @@ public class RepositoryPurgeConsumer
{ {
try try
{ {
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() ); RepositoryConfiguration repoConfig =
if( repoConfig.isDeleteReleasedSnapshots() ) configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.isDeleteReleasedSnapshots() )
{ {
cleanUp.process( path, configuration.getConfiguration() ); cleanUp.process( path );
} }
repoPurge.process( path, configuration.getConfiguration() ); repoPurge.process( path );
} }
catch ( RepositoryPurgeException rpe ) catch ( RepositoryPurgeException rpe )
{ {

View File

@ -23,8 +23,11 @@ import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration; import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.repository.layout.FilenameParts; import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException; import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil; import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import java.io.File; import java.io.File;
import java.util.List; import java.util.List;
@ -33,20 +36,30 @@ import java.util.Iterator;
import java.util.Collections; import java.util.Collections;
/** /**
* Purge the repository by retention count. Retain only the specified number of snapshots.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a> * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge" * @version
* role-hint="retention-count"
* instantiation-strategy="per-lookup"
*/ */
public class RetentionCountRepositoryPurge public class RetentionCountRepositoryPurge
extends AbstractRepositoryPurge extends AbstractRepositoryPurge
{ {
public void process( String path, Configuration configuration ) private RepositoryConfiguration repoConfig;
public RetentionCountRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
RepositoryConfiguration repoConfig )
{
super( repository, layout, artifactDao );
this.repoConfig = repoConfig;
}
public void process( String path )
throws RepositoryPurgeException throws RepositoryPurgeException
{ {
try try
{ {
File artifactFile = new File( getRepository().getUrl().getPath(), path ); File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() ) if( !artifactFile.exists() )
{ {
@ -56,8 +69,7 @@ public class RetentionCountRepositoryPurge
FilenameParts parts = getFilenameParts( path ); FilenameParts parts = getFilenameParts( path );
if ( VersionUtil.isSnapshot( parts.version ) ) if ( VersionUtil.isSnapshot( parts.version ) )
{ {
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
File parentDir = artifactFile.getParentFile(); File parentDir = artifactFile.getParentFile();
if ( parentDir.isDirectory() ) if ( parentDir.isDirectory() )
@ -87,10 +99,6 @@ public class RetentionCountRepositoryPurge
{ {
throw new RepositoryPurgeException( le.getMessage() ); throw new RepositoryPurgeException( le.getMessage() );
} }
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
} }
private List getUniqueVersions( File[] files ) private List getUniqueVersions( File[] files )

View File

@ -58,7 +58,7 @@ public class AbstractRepositoryPurgeTest
public static final int TEST_DAYS_OLDER = 30; public static final int TEST_DAYS_OLDER = 30;
private Configuration config; private RepositoryConfiguration config;
private ArchivaRepository repo; private ArchivaRepository repo;
@ -136,38 +136,20 @@ public class AbstractRepositoryPurgeTest
dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" ); dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
} }
public void lookupRepositoryPurge( String role ) public RepositoryConfiguration getRepoConfiguration()
throws Exception
{
repoPurge = (RepositoryPurge) lookup( RepositoryPurge.class.getName(), role );
repoPurge.setArtifactDao( dao );
repoPurge.setRepository( getRepository() );
repoPurge.setLayout( getLayout() );
}
public Configuration getRepoConfiguration()
{ {
if ( config == null ) if ( config == null )
{ {
config = new Configuration(); config = new RepositoryConfiguration();
} }
RepositoryConfiguration repoConfig = new RepositoryConfiguration(); config.setId( TEST_REPO_ID );
repoConfig.setId( TEST_REPO_ID ); config.setName( TEST_REPO_NAME );
repoConfig.setName( TEST_REPO_NAME ); config.setDaysOlder( TEST_DAYS_OLDER );
repoConfig.setDaysOlder( TEST_DAYS_OLDER ); config.setUrl( TEST_REPO_URL );
repoConfig.setUrl( TEST_REPO_URL ); config.setReleases( true );
repoConfig.setReleases( true ); config.setSnapshots( true );
repoConfig.setSnapshots( true ); config.setRetentionCount( TEST_RETENTION_COUNT );
repoConfig.setRetentionCount( TEST_RETENTION_COUNT );
List repos = new ArrayList();
repos.add( repoConfig );
config.setRepositories( repos );
return config; return config;
} }

View File

@ -35,7 +35,7 @@ public class DaysOldRepositoryPurgeTest
{ {
super.setUp(); super.setUp();
lookupRepositoryPurge( "days-old" ); repoPurge = new DaysOldRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
} }
private void setLastModified() private void setLastModified()
@ -52,21 +52,11 @@ public class DaysOldRepositoryPurgeTest
public void testIfAJarIsFound() public void testIfAJarIsFound()
throws Exception throws Exception
{ {
// Create it populateDb();
ArchivaArtifact artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
setLastModified(); setLastModified();
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT, getRepoConfiguration() ); repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
assertTrue( true ); assertTrue( true );
@ -90,4 +80,29 @@ public class DaysOldRepositoryPurgeTest
super.tearDown(); super.tearDown();
repoPurge = null; repoPurge = null;
} }
private void populateDb()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
//POM
artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
}
} }

View File

@ -44,7 +44,7 @@ public class RetentionCountRepositoryPurgeTest
{ {
super.setUp(); super.setUp();
lookupRepositoryPurge( "retention-count" ); repoPurge = new RetentionCountRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
} }
/** /**
@ -54,6 +54,134 @@ public class RetentionCountRepositoryPurgeTest
*/ */
public void testIfAJarWasFound() public void testIfAJarWasFound()
throws Exception throws Exception
{
populateIfJarWasFoundDb();
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
}
/**
 * Test if the artifact to be processed is a pom
 *
 * @throws Exception
 */
public void testIfAPomWasFound()
    throws Exception
{
    populateIfPomWasFoundDb();

    repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );

    // Common prefix of every snapshot file checked below.
    String base =
        "target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-";

    // assert if removed from repo (oldest snapshot build purged beyond retention count)
    String[] purged = new String[] {
        "20070427.065136-1.jar",
        "20070427.065136-1.jar.md5",
        "20070427.065136-1.jar.sha1",
        "20070427.065136-1.pom",
        "20070427.065136-1.pom.md5",
        "20070427.065136-1.pom.sha1" };
    for ( int i = 0; i < purged.length; i++ )
    {
        assertFalse( new File( base + purged[i] ).exists() );
    }

    // assert if not removed from repo (most recent builds within retention count are kept)
    String[] kept = new String[] {
        "20070615.105019-3.pom",
        "20070615.105019-3.pom.md5",
        "20070615.105019-3.pom.sha1",
        "20070615.105019-3.jar",
        "20070615.105019-3.jar.md5",
        "20070615.105019-3.jar.sha1",
        "20070615.105019-3-sources.jar",
        "20070615.105019-3-sources.jar.md5",
        "20070615.105019-3-sources.jar.sha1",
        "20070506.163513-2.pom",
        "20070506.163513-2.pom.md5",
        "20070506.163513-2.pom.sha1",
        "20070506.163513-2.jar",
        "20070506.163513-2.jar.md5",
        "20070506.163513-2.jar.sha1",
        "20070506.163513-2-sources.jar",
        "20070506.163513-2-sources.jar.md5",
        "20070506.163513-2-sources.jar.sha1" };
    for ( int i = 0; i < kept.length; i++ )
    {
        assertTrue( new File( base + kept[i] ).exists() );
    }
}
public void populateIfJarWasFoundDb()
throws Exception
{ {
// Create it // Create it
ArchivaArtifact artifact = ArchivaArtifact artifact =
@ -129,70 +257,9 @@ public class RetentionCountRepositoryPurgeTest
artifact.getModel().setOrigin( "test" ); artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact ); savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact ); assertNotNull( savedArtifact );
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT, getRepoConfiguration() );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
} }
/** public void populateIfPomWasFoundDb()
* Test if the artifact to be processed is a pom
*
* @throws Exception
*/
public void testIfAPomWasFound()
throws Exception throws Exception
{ {
// Create it // Create it
@ -244,60 +311,5 @@ public class RetentionCountRepositoryPurgeTest
artifact.getModel().setOrigin( "test" ); artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact ); savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact ); assertNotNull( savedArtifact );
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM, getRepoConfiguration() );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
} }
} }

View File

@ -20,91 +20,7 @@
<component-set> <component-set>
<components> <components>
<!-- DaysOldRepositoryPurge -->
<component>
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>days-old</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.DaysOldRepositoryPurge</implementation>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements-->
</component>
<!-- LuceneRepositoryContentIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint></role-hint>
</requirement>
</requirements>
</component>
<!-- ArchivaConfiguration -->
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<!-- ArchivaDAO -->
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<!-- JdoAccess --> <!-- JdoAccess -->
<component> <component>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role> <role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
@ -118,58 +34,6 @@
</requirements> </requirements>
</component> </component>
<!-- ArtifactDAO -->
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- ProjectModelDAO -->
<component>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryProblemDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JDO Factory --> <!-- JDO Factory -->
<component> <component>
<role>org.codehaus.plexus.jdo.JdoFactory</role> <role>org.codehaus.plexus.jdo.JdoFactory</role>
@ -186,38 +50,6 @@
<name>javax.jdo.PersistenceManagerFactoryClass</name> <name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value> <value>org.jpox.PersistenceManagerFactoryImpl</value>
</property> </property>
<!--property>
<name>org.jpox.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RetainValues</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RestoreValues</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>true</value>
</property-->
</otherProperties> </otherProperties>
</configuration> </configuration>
</component> </component>

View File

@ -19,91 +19,7 @@
--> -->
<component-set> <component-set>
<components> <components>
<!-- RetentionCountRepositoryPurge -->
<component>
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>retention-count</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.RetentionCountRepositoryPurge</implementation>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements-->
</component>
<!-- LuceneRepositoryContentIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint></role-hint>
</requirement>
</requirements>
</component>
<!-- ArchivaConfiguration -->
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<!-- ArchivaDAO -->
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<!-- JdoAccess --> <!-- JdoAccess -->
<component> <component>
@ -118,58 +34,6 @@
</requirements> </requirements>
</component> </component>
<!-- ArtifactDAO -->
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- ProjectModelDAO -->
<component>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryProblemDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JDO Factory --> <!-- JDO Factory -->
<component> <component>
<role>org.codehaus.plexus.jdo.JdoFactory</role> <role>org.codehaus.plexus.jdo.JdoFactory</role>
@ -186,38 +50,6 @@
<name>javax.jdo.PersistenceManagerFactoryClass</name> <name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value> <value>org.jpox.PersistenceManagerFactoryImpl</value>
</property> </property>
<property>
<name>org.jpox.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RetainValues</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RestoreValues</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>true</value>
</property>
</otherProperties> </otherProperties>
</configuration> </configuration>
</component> </component>