[MRM-294]

- Renamed DefaultCleanupReleasedSnapshots to CleanupReleasedSnapshotsRepositoryPurge
- Removed unneeded fields from RepositoryPurgeConsumer
- Added constructors to DaysOldRepositoryPurge, RetentionCountRepositoryPurge, and CleanupReleasedSnapshotsRepositoryPurge (see the wiring sketch below)
- Updated test classes and test resources



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@563064 13f79535-47bb-0310-9956-ffa450edef68
Maria Odea B. Ching 2007-08-06 08:22:35 +00:00
parent 45ad3a396b
commit 7db1bd8f9d
11 changed files with 279 additions and 676 deletions
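The heart of the change is the switch from setter injection to constructor injection for the purge implementations: RepositoryPurgeConsumer now builds them directly, and RepositoryPurge.process() no longer takes a Configuration. Below is a minimal sketch of the new wiring, condensed from the RepositoryPurgeConsumer diff further down; repository, repositoryLayout, configuration, dao, and path are assumed to already be in scope (injected by Plexus or passed in by the scanner), so this is not a drop-in snippet.

    // Condensed from beginScan()/processFile() in the diff below.
    RepositoryConfiguration repoConfig =
        configuration.getConfiguration().findRepositoryById( repository.getId() );

    // Pick the purge strategy from the repository configuration.
    RepositoryPurge repoPurge;
    if ( repoConfig.getDaysOlder() != 0 )
    {
        // delete snapshots older than the configured number of days
        repoPurge = new DaysOldRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
    }
    else
    {
        // otherwise keep only the configured number of snapshot builds
        repoPurge = new RetentionCountRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
    }

    // Released-snapshot cleanup takes the same collaborators; no index, no setters.
    RepositoryPurge cleanUp =
        new CleanupReleasedSnapshotsRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO() );

    // process( String path ) replaces process( String path, Configuration configuration ).
    if ( repoConfig.isDeleteReleasedSnapshots() )
    {
        cleanUp.process( path );
    }
    repoPurge.process( path );

Because the consumer now owns construction, the per-lookup Plexus component declarations for the purge classes and the RepositoryContentIndex wiring drop out of the consumer and the test resources.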

View File

@@ -19,7 +19,6 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.layout.FilenameParts;
@@ -28,29 +27,33 @@ import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public abstract class AbstractRepositoryPurge
implements RepositoryPurge
{
private ArchivaRepository repository;
protected ArchivaRepository repository;
private BidirectionalRepositoryLayout layout;
protected BidirectionalRepositoryLayout layout;
private RepositoryContentIndex index;
protected ArtifactDAO artifactDao;
private ArtifactDAO artifactDao;
public AbstractRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao )
{
this.repository = repository;
this.layout = layout;
this.artifactDao = artifactDao;
}
/**
* Get all files from the directory that matches the specified filename.
@@ -60,23 +63,14 @@ public abstract class AbstractRepositoryPurge
* @return
*/
protected File[] getFiles( File dir, String filename )
throws RepositoryPurgeException
{
FilenameFilter filter = new ArtifactFilenameFilter( filename );
if ( !dir.isDirectory() )
{
System.out.println( "File is not a directory." );
}
File[] files = dir.listFiles( filter );
return files;
}
public abstract void process( String path, Configuration configuration )
throws RepositoryPurgeException;
/**
* Purge the repo. Update db and index of removed artifacts.
*
@@ -84,34 +78,37 @@ public abstract class AbstractRepositoryPurge
* @throws RepositoryIndexException
*/
protected void purge( File[] artifactFiles )
throws RepositoryIndexException
{
List records = new ArrayList();
for ( int i = 0; i < artifactFiles.length; i++ )
{
artifactFiles[i].delete();
String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( getRepository().getUrl().getPath() );
String[] artifactPathParts = artifactFiles[i].getAbsolutePath().split( repository.getUrl().getPath() );
String artifactPath = artifactPathParts[artifactPathParts.length - 1];
if ( !artifactPath.toUpperCase().endsWith( "SHA1" ) && !artifactPath.toUpperCase().endsWith( "MD5" ) )
{
// intended to be swallowed
// continue updating the database for all artifacts
try
{
updateDatabase( artifactPath );
}
FileContentRecord record = new FileContentRecord();
record.setRepositoryId( this.repository.getId() );
record.setFilename( artifactPath );
records.add( record );
catch ( ArchivaDatabaseException ae )
{
//@todo determine logging to be used
}
catch ( LayoutException le )
{
//index.deleteRecords( records );
}
}
}
}
private void updateDatabase( String path )
throws ArchivaDatabaseException, LayoutException
{
try
{
ArchivaArtifact artifact = layout.toArtifact( path );
ArchivaArtifact queriedArtifact = artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifact.getClassifier(),
@@ -119,15 +116,6 @@ public abstract class AbstractRepositoryPurge
artifactDao.deleteArtifact( queriedArtifact );
}
catch ( ArchivaDatabaseException ae )
{
}
catch ( LayoutException le )
{
}
}
/**
* Get the artifactId, version, extension and classifier from the path parameter
@@ -147,34 +135,4 @@ public abstract class AbstractRepositoryPurge
return parts;
}
public void setRepository( ArchivaRepository repository )
{
this.repository = repository;
}
public void setLayout( BidirectionalRepositoryLayout layout )
{
this.layout = layout;
}
public void setIndex( RepositoryContentIndex index )
{
this.index = index;
}
public void setArtifactDao( ArtifactDAO artifactDao )
{
this.artifactDao = artifactDao;
}
protected ArchivaRepository getRepository()
{
return repository;
}
protected BidirectionalRepositoryLayout getLayout()
{
return layout;
}
}

View File

@@ -19,49 +19,56 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Date;
import java.util.ArrayList;
import java.util.Iterator;
/**
* M2 implementation for cleaning up the released snapshots.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public class DefaultCleanupReleasedSnapshots
public class CleanupReleasedSnapshotsRepositoryPurge
extends AbstractRepositoryPurge
{
public static final String SNAPSHOT = "-SNAPSHOT";
private RepositoryMetadataReader metadataReader;
public DefaultCleanupReleasedSnapshots()
public CleanupReleasedSnapshotsRepositoryPurge( ArchivaRepository repository, BidirectionalRepositoryLayout layout,
ArtifactDAO artifactDao )
{
super( repository, layout, artifactDao );
metadataReader = new RepositoryMetadataReader();
}
public void process( String path, Configuration configuration )
public void process( String path )
throws RepositoryPurgeException
{
try
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );
File artifactFile = new File( repository.getUrl().getPath(), path );
if ( !artifactFile.exists() )
{
@@ -112,10 +119,6 @@ public class DefaultCleanupReleasedSnapshots
{
throw new RepositoryPurgeException( ie.getMessage() );
}
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
}
private void updateMetadata( File artifactIdDir )
@@ -147,7 +150,8 @@ public class DefaultCleanupReleasedSnapshots
}
catch ( RepositoryMetadataException rme )
{
System.out.println( "Error updating metadata " + metadataFiles[i].getAbsoluteFile() );
// continue updating other metadata files even if there is an exception
// @todo log to console
}
}
}
@@ -186,5 +190,4 @@ public class DefaultCleanupReleasedSnapshots
return versions;
}
}

View File

@@ -21,31 +21,43 @@ package org.apache.maven.archiva.consumers.core.repository;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.io.File;
/**
* Purge repository for snapshots older than the specified days in the repository configuration.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge"
* role-hint="days-old"
* instantiation-strategy="per-lookup"
* @version
*/
public class DaysOldRepositoryPurge
extends AbstractRepositoryPurge
{
private RepositoryConfiguration repoConfig;
public void process( String path, Configuration configuration )
public DaysOldRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
RepositoryConfiguration repoConfig)
{
super( repository, layout, artifactDao );
this.repoConfig = repoConfig;
}
public void process( String path )
throws RepositoryPurgeException
{
try
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );
File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() )
{
@@ -56,9 +68,7 @@ public class DaysOldRepositoryPurge
if ( VersionUtil.isSnapshot( parts.version ) )
{
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
Calendar olderThanThisDate = new GregorianCalendar();
Calendar olderThanThisDate = Calendar.getInstance();
olderThanThisDate.add( Calendar.DATE, ( -1 * repoConfig.getDaysOlder() ) );
if ( artifactFile.lastModified() < olderThanThisDate.getTimeInMillis() )
@@ -75,10 +85,6 @@ public class DaysOldRepositoryPurge
{
throw new RepositoryPurgeException( le.getMessage() );
}
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
}
}

View File

@@ -37,37 +37,8 @@ public interface RepositoryPurge
* Perform checking on artifact for repository purge
*
* @param path path to the scanned artifact
* @param configuration the configuration for the repository currently being scanned
*/
public void process( String path, Configuration configuration )
public void process( String path )
throws RepositoryPurgeException;
/**
* Set the repository to be purged
*
* @param repository
*/
public void setRepository( ArchivaRepository repository );
/**
* Set the layout of the repository to be purged
*
* @param layout
*/
public void setLayout( BidirectionalRepositoryLayout layout );
/**
* Set the index of the repository
*
* @param index
*/
public void setIndex( RepositoryContentIndex index );
/**
* Set the artifact dao used for updating the database of the changes in the repo
*
* @param artifactDao
*/
public void setArtifactDao( ArtifactDAO artifactDao );
}

View File

@@ -73,11 +73,6 @@ public class RepositoryPurgeConsumer
*/
private BidirectionalRepositoryLayoutFactory layoutFactory;
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
/**
* @plexus.requirement role-hint="jdo"
*/
@@ -90,16 +85,12 @@ public class RepositoryPurgeConsumer
private ArchivaRepository repository;
private BidirectionalRepositoryLayout repositoryLayout;
private List includes = new ArrayList();
private List propertyNameTriggers = new ArrayList();
private RepositoryPurge repoPurge;
private RepositoryContentIndex index;
private RepositoryPurge cleanUp;
public String getId()
@@ -130,17 +121,18 @@ public class RepositoryPurgeConsumer
public void beginScan( ArchivaRepository repository )
throws ConsumerException
{
BidirectionalRepositoryLayout repositoryLayout = null;
if ( !repository.isManaged() )
{
throw new ConsumerException( "Consumer requires managed repository." );
}
this.repository = repository;
this.index = indexFactory.createFileContentIndex( repository );
try
{
this.repositoryLayout = layoutFactory.getLayout( this.repository.getLayoutType() );
repositoryLayout = layoutFactory.getLayout( repository.getLayoutType() );
}
catch ( LayoutException e )
{
@@ -148,27 +140,18 @@ public class RepositoryPurgeConsumer
"Unable to initialize consumer due to unknown repository layout: " + e.getMessage(), e );
}
// @todo handle better injection of RepositoryPurge
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge();
repoPurge = new DaysOldRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
}
else
{
repoPurge = new RetentionCountRepositoryPurge();
repoPurge =
new RetentionCountRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO(), repoConfig );
}
repoPurge.setLayout( repositoryLayout );
repoPurge.setRepository( repository );
repoPurge.setIndex( index );
repoPurge.setArtifactDao( dao.getArtifactDAO() );
cleanUp = new DefaultCleanupReleasedSnapshots();
cleanUp.setRepository( repository );
cleanUp.setLayout( repositoryLayout );
cleanUp.setArtifactDao( dao.getArtifactDAO() );
cleanUp.setIndex( index );
cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repository, repositoryLayout, dao.getArtifactDAO() );
}
public void processFile( String path )
@@ -176,13 +159,14 @@ public class RepositoryPurgeConsumer
{
try
{
RepositoryConfiguration repoConfig = configuration.getConfiguration().findRepositoryById( repository.getId() );
if( repoConfig.isDeleteReleasedSnapshots() )
RepositoryConfiguration repoConfig =
configuration.getConfiguration().findRepositoryById( repository.getId() );
if ( repoConfig.isDeleteReleasedSnapshots() )
{
cleanUp.process( path, configuration.getConfiguration() );
cleanUp.process( path );
}
repoPurge.process( path, configuration.getConfiguration() );
repoPurge.process( path );
}
catch ( RepositoryPurgeException rpe )
{

View File

@@ -23,8 +23,11 @@ import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.repository.layout.FilenameParts;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.database.ArtifactDAO;
import java.io.File;
import java.util.List;
@@ -33,20 +36,30 @@ import java.util.Iterator;
import java.util.Collections;
/**
* Purge the repository by retention count. Retain only the specified number of snapshots.
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @plexus.component role="org.apache.maven.archiva.consumers.core.repository.RepositoryPurge"
* role-hint="retention-count"
* instantiation-strategy="per-lookup"
* @version
*/
public class RetentionCountRepositoryPurge
extends AbstractRepositoryPurge
{
public void process( String path, Configuration configuration )
private RepositoryConfiguration repoConfig;
public RetentionCountRepositoryPurge( ArchivaRepository repository,
BidirectionalRepositoryLayout layout, ArtifactDAO artifactDao,
RepositoryConfiguration repoConfig )
{
super( repository, layout, artifactDao );
this.repoConfig = repoConfig;
}
public void process( String path )
throws RepositoryPurgeException
{
try
{
File artifactFile = new File( getRepository().getUrl().getPath(), path );
File artifactFile = new File( repository.getUrl().getPath(), path );
if( !artifactFile.exists() )
{
@@ -57,7 +70,6 @@ public class RetentionCountRepositoryPurge
if ( VersionUtil.isSnapshot( parts.version ) )
{
RepositoryConfiguration repoConfig = configuration.findRepositoryById( getRepository().getId() );
File parentDir = artifactFile.getParentFile();
if ( parentDir.isDirectory() )
@@ -87,10 +99,6 @@ public class RetentionCountRepositoryPurge
{
throw new RepositoryPurgeException( le.getMessage() );
}
catch ( RepositoryIndexException re )
{
throw new RepositoryPurgeException( re.getMessage() );
}
}
private List getUniqueVersions( File[] files )

View File

@@ -58,7 +58,7 @@ public class AbstractRepositoryPurgeTest
public static final int TEST_DAYS_OLDER = 30;
private Configuration config;
private RepositoryConfiguration config;
private ArchivaRepository repo;
@@ -136,38 +136,20 @@ public class AbstractRepositoryPurgeTest
dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
}
public void lookupRepositoryPurge( String role )
throws Exception
{
repoPurge = (RepositoryPurge) lookup( RepositoryPurge.class.getName(), role );
repoPurge.setArtifactDao( dao );
repoPurge.setRepository( getRepository() );
repoPurge.setLayout( getLayout() );
}
public Configuration getRepoConfiguration()
public RepositoryConfiguration getRepoConfiguration()
{
if ( config == null )
{
config = new Configuration();
config = new RepositoryConfiguration();
}
RepositoryConfiguration repoConfig = new RepositoryConfiguration();
repoConfig.setId( TEST_REPO_ID );
repoConfig.setName( TEST_REPO_NAME );
repoConfig.setDaysOlder( TEST_DAYS_OLDER );
repoConfig.setUrl( TEST_REPO_URL );
repoConfig.setReleases( true );
repoConfig.setSnapshots( true );
repoConfig.setRetentionCount( TEST_RETENTION_COUNT );
List repos = new ArrayList();
repos.add( repoConfig );
config.setRepositories( repos );
config.setId( TEST_REPO_ID );
config.setName( TEST_REPO_NAME );
config.setDaysOlder( TEST_DAYS_OLDER );
config.setUrl( TEST_REPO_URL );
config.setReleases( true );
config.setSnapshots( true );
config.setRetentionCount( TEST_RETENTION_COUNT );
return config;
}

View File

@@ -35,7 +35,7 @@ public class DaysOldRepositoryPurgeTest
{
super.setUp();
lookupRepositoryPurge( "days-old" );
repoPurge = new DaysOldRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
}
private void setLastModified()
@@ -52,21 +52,11 @@ public class DaysOldRepositoryPurgeTest
public void testIfAJarIsFound()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
populateDb();
setLastModified();
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT, getRepoConfiguration() );
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
assertTrue( true );
@@ -90,4 +80,29 @@ public class DaysOldRepositoryPurgeTest
super.tearDown();
repoPurge = null;
}
private void populateDb()
throws Exception
{
// Create it
ArchivaArtifact artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "jar" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
// Save it.
ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
//POM
artifact =
dao.createArtifact( "org.apache.maven.plugins", "maven-install-plugin", "2.2-SNAPSHOT", "", "pom" );
assertNotNull( artifact );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
}
}

View File

@@ -44,7 +44,7 @@ public class RetentionCountRepositoryPurgeTest
{
super.setUp();
lookupRepositoryPurge( "retention-count" );
repoPurge = new RetentionCountRepositoryPurge( getRepository(), getLayout(), dao, getRepoConfiguration() );
}
/**
@@ -54,6 +54,134 @@ public class RetentionCountRepositoryPurgeTest
*/
public void testIfAJarWasFound()
throws Exception
{
populateIfJarWasFoundDb();
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
}
/**
* Test if the artifact to be processed is a pom
*
* @throws Exception
*/
public void testIfAPomWasFound()
throws Exception
{
populateIfPomWasFoundDb();
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
}
public void populateIfJarWasFoundDb()
throws Exception
{
// Create it
ArchivaArtifact artifact =
@@ -129,70 +257,9 @@ public class RetentionCountRepositoryPurgeTest
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT, getRepoConfiguration() );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.pom.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.160758-2.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070505.090015-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070506.090132-4.pom.sha1" ).exists() );
}
/**
* Test if the artifact to be processed is a pom
*
* @throws Exception
*/
public void testIfAPomWasFound()
public void populateIfPomWasFoundDb()
throws Exception
{
// Create it
@@ -244,60 +311,5 @@ public class RetentionCountRepositoryPurgeTest
artifact.getModel().setOrigin( "test" );
savedArtifact = dao.saveArtifact( artifact );
assertNotNull( savedArtifact );
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM, getRepoConfiguration() );
// assert if removed from repo
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.jar.sha1" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.md5" ).exists() );
assertFalse( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070427.065136-1.pom.sha1" ).exists() );
// assert if not removed from repo
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070615.105019-3-sources.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.jar.sha1" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.md5" ).exists() );
assertTrue( new File(
"target/test-classes/test-repo/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2-sources.jar.sha1" ).exists() );
}
}

View File

@@ -21,90 +21,6 @@
<component-set>
<components>
<!-- DaysOldRepositoryPurge -->
<component>
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>days-old</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.DaysOldRepositoryPurge</implementation>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements-->
</component>
<!-- LuceneRepositoryContentIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint></role-hint>
</requirement>
</requirements>
</component>
<!-- ArchivaConfiguration -->
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<!-- ArchivaDAO -->
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<!-- JdoAccess -->
<component>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
@@ -118,58 +34,6 @@
</requirements>
</component>
<!-- ArtifactDAO -->
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- ProjectModelDAO -->
<component>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryProblemDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JDO Factory -->
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
@@ -186,38 +50,6 @@
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
<!--property>
<name>org.jpox.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RetainValues</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RestoreValues</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>true</value>
</property-->
</otherProperties>
</configuration>
</component>

View File

@@ -21,90 +21,6 @@
<component-set>
<components>
<!-- RetentionCountRepositoryPurge -->
<component>
<role>org.apache.maven.archiva.consumers.core.repository.RepositoryPurge</role>
<role-hint>retention-count</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.RetentionCountRepositoryPurge</implementation>
<!--requirements>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
</requirement>
</requirements-->
</component>
<!-- LuceneRepositoryContentIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint></role-hint>
</requirement>
</requirements>
</component>
<!-- ArchivaConfiguration -->
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<!-- ArchivaDAO -->
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<!-- JdoAccess -->
<component>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
@@ -118,58 +34,6 @@
</requirements>
</component>
<!-- ArtifactDAO -->
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- ProjectModelDAO -->
<component>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- RepositoryProblemDAO -->
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JDO Factory -->
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
@@ -186,38 +50,6 @@
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
<property>
<name>org.jpox.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.poid.transactionIsolation</name>
<value>READ_COMMITTED</value>
</property>
<property>
<name>org.jpox.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RetainValues</name>
<value>true</value>
</property>
<property>
<name>javax.jdo.option.RestoreValues</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateTables</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateColumns</name>
<value>true</value>
</property>
<property>
<name>org.jpox.validateConstraints</name>
<value>true</value>
</property>
</otherProperties>
</configuration>
</component>