[MRM-948] remove direct use of database and indexer from the core consumers

git-svn-id: https://svn.apache.org/repos/asf/archiva/branches@694625 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2008-09-12 07:13:04 +00:00
parent 474737fc4b
commit a4e0310100
24 changed files with 692 additions and 835 deletions

pom.xml

@@ -28,14 +28,6 @@
   <name>Archiva Consumers :: Core Consumers</name>
   <dependencies>
-    <dependency>
-      <groupId>org.apache.archiva</groupId>
-      <artifactId>archiva-database</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.archiva</groupId>
-      <artifactId>archiva-indexer</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.archiva</groupId>
       <artifactId>archiva-configuration</artifactId>

AbstractRepositoryPurge.java

@@ -19,25 +19,15 @@
  * under the License.
  */

-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryIndexException;
-import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
-import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
-import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
-import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.List;
+import java.util.Set;
+
 import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.model.ArtifactReference;
 import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;

 /**
  * Base class for all repository purge tasks.

@@ -47,18 +37,14 @@
 public abstract class AbstractRepositoryPurge
     implements RepositoryPurge
 {
-    protected ManagedRepositoryContent repository;
-
-    protected ArtifactDAO artifactDao;
-
-    private Map<String, RepositoryContentIndex> indices;
+    protected final ManagedRepositoryContent repository;
+
+    protected final List<RepositoryListener> listeners;

-    public AbstractRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
-                                    Map<String, RepositoryContentIndex> indices )
+    public AbstractRepositoryPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
     {
         this.repository = repository;
-        this.artifactDao = artifactDao;
-        this.indices = indices;
+        this.listeners = listeners;
     }

 /**

@@ -98,10 +84,6 @@ protected void purge( Set<ArtifactReference> references )
     {
         if( references != null && !references.isEmpty() )
         {
-            List<LuceneRepositoryContentRecord> fileContentRecords = new ArrayList<LuceneRepositoryContentRecord>();
-            List<LuceneRepositoryContentRecord> hashcodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
-            List<LuceneRepositoryContentRecord> bytecodeRecords = new ArrayList<LuceneRepositoryContentRecord>();
-
             for ( ArtifactReference reference : references )
             {
                 File artifactFile = repository.toFile( reference );

@@ -110,46 +92,14 @@ protected void purge( Set<ArtifactReference> references )
                     new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
                                          reference.getClassifier(), reference.getType() );

-                FileContentRecord fileContentRecord = new FileContentRecord();
-                fileContentRecord.setFilename( repository.toPath( artifact ) );
-                fileContentRecords.add( fileContentRecord );
-
-                HashcodesRecord hashcodesRecord = new HashcodesRecord();
-                hashcodesRecord.setArtifact( artifact );
-                hashcodeRecords.add( hashcodesRecord );
-
-                BytecodeRecord bytecodeRecord = new BytecodeRecord();
-                bytecodeRecord.setArtifact( artifact );
-                bytecodeRecords.add( bytecodeRecord );
+                for ( RepositoryListener listener : listeners )
+                {
+                    listener.deleteArtifact( repository, artifact );
+                }

                 // TODO: this needs to be logged
                 artifactFile.delete();
                 purgeSupportFiles( artifactFile );
-
-                // intended to be swallowed
-                // continue updating the database for all artifacts
-                try
-                {
-                    String artifactPath = toRelativePath( artifactFile );
-                    updateDatabase( artifactPath );
-                }
-                catch ( ArchivaDatabaseException ae )
-                {
-                    // TODO: determine logging to be used
-                }
-                catch ( LayoutException le )
-                {
-                    // Ignore
-                }
-            }
-
-            try
-            {
-                updateIndices( fileContentRecords, hashcodeRecords, bytecodeRecords );
-            }
-            catch ( RepositoryIndexException e )
-            {
-                // Ignore
-            }
             }
         }
     }

@@ -186,32 +136,4 @@ private void purgeSupportFiles( File artifactFile )
             }
         }
     }
-
-    private void updateDatabase( String path )
-        throws ArchivaDatabaseException, LayoutException
-    {
-        ArtifactReference artifact = repository.toArtifactReference( path );
-        ArchivaArtifact queriedArtifact =
-            artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
-                                     artifact.getClassifier(), artifact.getType() );
-        artifactDao.deleteArtifact( queriedArtifact );
-
-        // TODO [MRM-37]: re-run the database consumers to clean up
-    }
-
-    private void updateIndices( List<LuceneRepositoryContentRecord> fileContentRecords,
-                                List<LuceneRepositoryContentRecord> hashcodeRecords,
-                                List<LuceneRepositoryContentRecord> bytecodeRecords )
-        throws RepositoryIndexException
-    {
-        RepositoryContentIndex index = indices.get( "filecontent" );
-        index.deleteRecords( fileContentRecords );
-
-        index = indices.get( "hashcodes" );
-        index.deleteRecords( hashcodeRecords );
-
-        index = indices.get( "bytecode" );
-        index.deleteRecords( bytecodeRecords );
-    }
 }
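Stripped of the surrounding plumbing, the pattern introduced above is: build an ArchivaArtifact for each purged reference, notify every registered RepositoryListener, then delete the file. A minimal sketch, assuming RepositoryListener exposes only the deleteArtifact callback actually used here (the real interface in the repository.events package may declare more events):

import java.util.List;
import java.util.Set;

import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;

// Callback contract the purge code now targets; its shape is assumed from the usage above.
interface RepositoryListener
{
    void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact );
}

class ListenerNotifyingPurge
{
    private final ManagedRepositoryContent repository;

    private final List<RepositoryListener> listeners;

    ListenerNotifyingPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
    {
        this.repository = repository;
        this.listeners = listeners;
    }

    void purge( Set<ArtifactReference> references )
    {
        for ( ArtifactReference reference : references )
        {
            ArchivaArtifact artifact =
                new ArchivaArtifact( reference.getGroupId(), reference.getArtifactId(), reference.getVersion(),
                                     reference.getClassifier(), reference.getType() );

            // database and index maintenance is now the listeners' responsibility
            for ( RepositoryListener listener : listeners )
            {
                listener.deleteArtifact( repository, artifact );
            }

            repository.toFile( reference ).delete();
        }
    }
}

The purge classes keep no reference to ArtifactDAO or RepositoryContentIndex at all, which is the point of MRM-948: any number of listeners (database cleanup, index cleanup, auditing) can react to the deletion without the core consumers depending on those modules.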

CleanupReleasedSnapshotsRepositoryPurge.java

@@ -19,12 +19,17 @@
  * under the License.
  */

+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
 import org.apache.maven.archiva.common.utils.VersionComparator;
 import org.apache.maven.archiva.common.utils.VersionUtil;
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
+import org.apache.maven.archiva.model.ArchivaArtifact;
 import org.apache.maven.archiva.model.ArtifactReference;
 import org.apache.maven.archiva.model.ProjectReference;
 import org.apache.maven.archiva.model.VersionedReference;

@@ -33,17 +38,11 @@
 import org.apache.maven.archiva.repository.RepositoryContentFactory;
 import org.apache.maven.archiva.repository.RepositoryException;
 import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
 import org.apache.maven.archiva.repository.layout.LayoutException;
 import org.apache.maven.archiva.repository.metadata.MetadataTools;
 import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;

-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;

 /**
  * <p>
  * This will look in a single managed repository, and purge any snapshots that are present

@@ -78,11 +77,12 @@ public class CleanupReleasedSnapshotsRepositoryPurge
     private RepositoryContentFactory repoContentFactory;

-    public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
-                                                    MetadataTools metadataTools, Map<String, RepositoryContentIndex> indices,
-                                                    ArchivaConfiguration archivaConfig, RepositoryContentFactory repoContentFactory )
+    public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, MetadataTools metadataTools,
+                                                    ArchivaConfiguration archivaConfig,
+                                                    RepositoryContentFactory repoContentFactory,
+                                                    List<RepositoryListener> listeners )
     {
-        super( repository, artifactDao, indices );
+        super( repository, listeners );
         this.metadataTools = metadataTools;
         this.archivaConfig = archivaConfig;
         this.repoContentFactory = repoContentFactory;

@@ -101,17 +101,17 @@ public void process( String path )
             return;
         }

-        ArtifactReference artifact = repository.toArtifactReference( path );
+        ArtifactReference artifactRef = repository.toArtifactReference( path );

-        if ( !VersionUtil.isSnapshot( artifact.getVersion() ) )
+        if ( !VersionUtil.isSnapshot( artifactRef.getVersion() ) )
         {
             // Nothing to do here, not a snapshot, skip it.
             return;
         }

         ProjectReference reference = new ProjectReference();
-        reference.setGroupId( artifact.getGroupId() );
-        reference.setArtifactId( artifact.getArtifactId() );
+        reference.setGroupId( artifactRef.getGroupId() );
+        reference.setArtifactId( artifactRef.getArtifactId() );

         // Gather up all of the versions.
         List<String> allVersions = new ArrayList<String>( repository.getVersions( reference ) );

@@ -161,8 +161,12 @@ public void process( String path )
             boolean needsMetadataUpdate = false;

             VersionedReference versionRef = new VersionedReference();
-            versionRef.setGroupId( artifact.getGroupId() );
-            versionRef.setArtifactId( artifact.getArtifactId() );
+            versionRef.setGroupId( artifactRef.getGroupId() );
+            versionRef.setArtifactId( artifactRef.getArtifactId() );
+
+            ArchivaArtifact artifact =
+                new ArchivaArtifact( artifactRef.getGroupId(), artifactRef.getArtifactId(), artifactRef.getVersion(),
+                                     artifactRef.getClassifier(), artifactRef.getType() );

             for ( String version : snapshotVersions )
             {

@@ -170,13 +174,19 @@ public void process( String path )
                 {
                     versionRef.setVersion( version );
                     repository.deleteVersion( versionRef );
+
+                    for ( RepositoryListener listener : listeners )
+                    {
+                        listener.deleteArtifact( repository, artifact );
+                    }
+
                     needsMetadataUpdate = true;
                 }
             }

             if ( needsMetadataUpdate )
             {
-                updateMetadata( artifact );
+                updateMetadata( artifactRef );
             }
         }
         catch ( LayoutException e )

DaysOldRepositoryPurge.java

@@ -19,17 +19,6 @@
  * under the License.
  */

-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.model.VersionedReference;
-import org.apache.maven.archiva.repository.ContentNotFoundException;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
 import java.io.File;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;

@@ -38,10 +27,19 @@
 import java.util.Collections;
 import java.util.Date;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
 import java.util.regex.Matcher;

+import org.apache.commons.lang.time.DateUtils;
+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.model.VersionedReference;
+import org.apache.maven.archiva.repository.ContentNotFoundException;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+
 /**
  * Purge from repository all snapshots older than the specified days in the repository configuration.
  *

@@ -56,10 +54,10 @@ public class DaysOldRepositoryPurge
     private int retentionCount;

-    public DaysOldRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao, int daysOlder,
-                                   int retentionCount, Map<String, RepositoryContentIndex> indices )
+    public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder,
+                                   int retentionCount, List<RepositoryListener> listeners )
     {
-        super( repository, artifactDao, indices );
+        super( repository, listeners );
         this.daysOlder = daysOlder;
         this.retentionCount = retentionCount;

         timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );

RepositoryPurgeConsumer.java

@@ -19,6 +19,10 @@
  * under the License.
  */

+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.ConfigurationNames;
 import org.apache.maven.archiva.configuration.FileTypes;

@@ -26,24 +30,18 @@
 import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
 import org.apache.maven.archiva.consumers.ConsumerException;
 import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.database.ArchivaDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
 import org.apache.maven.archiva.repository.ManagedRepositoryContent;
 import org.apache.maven.archiva.repository.RepositoryContentFactory;
 import org.apache.maven.archiva.repository.RepositoryException;
 import org.apache.maven.archiva.repository.RepositoryNotFoundException;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
 import org.apache.maven.archiva.repository.metadata.MetadataTools;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
 import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
 import org.codehaus.plexus.registry.Registry;
 import org.codehaus.plexus.registry.RegistryListener;

-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import edu.emory.mathcs.backport.java.util.Collections;

 /**
  * Consumer for removing old snapshots in the repository based on the criteria

@@ -75,11 +73,6 @@ public class RepositoryPurgeConsumer
      */
     private ArchivaConfiguration configuration;

-    /**
-     * @plexus.requirement role-hint="jdo"
-     */
-    private ArchivaDAO dao;
-
     /**
      * @plexus.requirement
      */

@@ -97,19 +90,15 @@ public class RepositoryPurgeConsumer
     private List<String> includes = new ArrayList<String>();

-    private List<String> propertyNameTriggers = new ArrayList<String>();
-
     private RepositoryPurge repoPurge;

     private RepositoryPurge cleanUp;

     private boolean deleteReleasedSnapshots;

-    /**
-     * @plexus.requirement role-hint="lucene"
-     */
-    private RepositoryContentIndexFactory indexFactory;
+    /** @plexus.requirement role="org.apache.maven.archiva.repository.events.RepositoryListener" */
+    private List<RepositoryListener> listeners = Collections.emptyList();

     public String getId()
     {
         return this.id;

@@ -140,27 +129,23 @@ public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
     {
         try
         {
-            Map<String, RepositoryContentIndex> indices = new HashMap<String, RepositoryContentIndex>();
-            indices.put( "bytecode", indexFactory.createBytecodeIndex( repository ) );
-            indices.put( "hashcodes", indexFactory.createHashcodeIndex( repository ) );
-            indices.put( "filecontent", indexFactory.createFileContentIndex( repository ) );
-
             ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
                 .getId() );

             if ( repository.getDaysOlder() != 0 )
             {
-                repoPurge = new DaysOldRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
-                    .getDaysOlder(), repository.getRetentionCount(), indices );
+                repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
+                                                        repository.getRetentionCount(), listeners );
             }
             else
             {
-                repoPurge = new RetentionCountRepositoryPurge( repositoryContent, dao.getArtifactDAO(), repository
-                    .getRetentionCount(), indices );
+                repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
+                                                               listeners );
             }

-            cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, dao.getArtifactDAO(),
-                                                                   metadataTools, indices, configuration, repositoryFactory );
+            cleanUp =
+                new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
+                                                             repositoryFactory, listeners );

             deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
         }

@@ -230,9 +215,4 @@ public boolean isProcessUnmodified()
         // we need to check all files for deletion, especially if not modified
         return true;
     }
-
-    public void setRepositoryContentIndexFactory( RepositoryContentIndexFactory indexFactory )
-    {
-        this.indexFactory = indexFactory;
-    }
 }
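The consumer no longer constructs the DAO or the Lucene index factory; it only declares a Plexus requirement for every component bound to the RepositoryListener role and hands that list to the purge implementations. The database logic removed above would be expected to reappear behind such a listener in a database-aware module. A hypothetical sketch (the class name, role-hint and wiring are assumptions; the ArtifactDAO calls mirror the updateDatabase method deleted from AbstractRepositoryPurge):

import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;

/**
 * Hypothetical database-side listener, discovered through the plexus.requirement shown above.
 *
 * @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="database-cleanup"
 */
public class DatabaseCleanupListener
    implements RepositoryListener
{
    /**
     * @plexus.requirement role-hint="jdo"
     */
    private ArtifactDAO artifactDao;

    public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
    {
        try
        {
            // same lookup-then-delete sequence the purge used to run inline
            ArchivaArtifact queried =
                artifactDao.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
                                         artifact.getClassifier(), artifact.getType() );
            artifactDao.deleteArtifact( queried );
        }
        catch ( ArchivaDatabaseException e )
        {
            // the old inline code swallowed this as well; a real listener should log it
        }
    }
}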

RetentionCountRepositoryPurge.java

@@ -19,23 +19,21 @@
  * under the License.
  */

-import org.apache.maven.archiva.common.utils.VersionComparator;
-import org.apache.maven.archiva.common.utils.VersionUtil;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-import org.apache.maven.archiva.model.ArtifactReference;
-import org.apache.maven.archiva.model.VersionedReference;
-import org.apache.maven.archiva.repository.ContentNotFoundException;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.apache.maven.archiva.repository.layout.LayoutException;
-
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;

+import org.apache.maven.archiva.common.utils.VersionComparator;
+import org.apache.maven.archiva.common.utils.VersionUtil;
+import org.apache.maven.archiva.model.ArtifactReference;
+import org.apache.maven.archiva.model.VersionedReference;
+import org.apache.maven.archiva.repository.ContentNotFoundException;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.apache.maven.archiva.repository.layout.LayoutException;
+
 /**
  * Purge the repository by retention count. Retain only the specified number of snapshots.
  *

@@ -46,10 +44,10 @@ public class RetentionCountRepositoryPurge
 {
     private int retentionCount;

-    public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, ArtifactDAO artifactDao,
-                                          int retentionCount, Map<String, RepositoryContentIndex> indices )
+    public RetentionCountRepositoryPurge( ManagedRepositoryContent repository,
+                                          int retentionCount, List<RepositoryListener> listeners )
     {
-        super( repository, artifactDao, indices );
+        super( repository, listeners );
         this.retentionCount = retentionCount;
     }

AbstractRepositoryPurgeTest.java

@@ -19,28 +19,16 @@
  * under the License.
  */

-import org.apache.commons.io.FileUtils;
-import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.database.ArtifactDAO;
-import org.apache.maven.archiva.model.ArchivaArtifact;
-import org.apache.maven.archiva.repository.ManagedRepositoryContent;
-import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
-import org.codehaus.plexus.jdo.JdoFactory;
-import org.codehaus.plexus.spring.PlexusInSpringTestCase;
-import org.jpox.SchemaTool;
-
 import java.io.File;
 import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-import java.util.Properties;
-import java.util.Map.Entry;

-import javax.jdo.PersistenceManager;
-import javax.jdo.PersistenceManagerFactory;
+import org.apache.commons.io.FileUtils;
+import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
+import org.apache.maven.archiva.model.ArchivaArtifact;
+import org.apache.maven.archiva.repository.ManagedRepositoryContent;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
+import org.codehaus.plexus.spring.PlexusInSpringTestCase;
+import org.easymock.MockControl;

 /**
  * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>

@@ -74,74 +62,21 @@ public abstract class AbstractRepositoryPurgeTest
     private ManagedRepositoryContent repo;

-    protected ArtifactDAO dao;
-
     protected RepositoryPurge repoPurge;

+    protected MockControl listenerControl;
+
+    protected RepositoryListener listener;
+
+    @Override
     protected void setUp()
         throws Exception
     {
         super.setUp();

-        DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
-        assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
-
-        jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
-        jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
-        jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:testdb" ) );
-        jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
-        jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
-        jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
-        jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
-        jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
-        jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
-        jdoFactory.setProperty( "javax.jdo.option.RestoreValues", "true" );
-        // jdoFactory.setProperty( "org.jpox.autoCreateColumns", "true" );
-        jdoFactory.setProperty( "org.jpox.validateTables", "true" );
-        jdoFactory.setProperty( "org.jpox.validateColumns", "true" );
-        jdoFactory.setProperty( "org.jpox.validateConstraints", "true" );
-
-        Properties properties = jdoFactory.getProperties();
-        for ( Entry<Object, Object> entry : properties.entrySet() )
-        {
-            System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
-        }
-
-        URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
-        if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
-        {
-            fail( "Unable to process test " + getName() + " - missing package.jdo." );
-        }
-
-        File propsFile = null; // intentional
-        boolean verbose = true;
-        SchemaTool.deleteSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose );
-        SchemaTool.createSchemaTables( jdoFileUrls, new URL[] {}, propsFile, verbose, null );
-
-        PersistenceManagerFactory pmf = jdoFactory.getPersistenceManagerFactory();
-        assertNotNull( pmf );
-
-        PersistenceManager pm = pmf.getPersistenceManager();
-        pm.close();
-
-        dao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
+        listenerControl = MockControl.createControl( RepositoryListener.class );
+
+        listener = (RepositoryListener) listenerControl.getMock();
     }

     @Override

@@ -180,28 +115,6 @@ public ManagedRepositoryContent getRepository()
         return repo;
     }

-    protected void populateDb( String groupId, String artifactId, List<String> versions )
-        throws ArchivaDatabaseException
-    {
-        for ( String version : versions )
-        {
-            ArchivaArtifact artifact = dao.createArtifact( groupId, artifactId, version, "", "jar" );
-            assertNotNull( artifact );
-            artifact.getModel().setLastModified( new Date() );
-            artifact.getModel().setOrigin( "test" );
-            ArchivaArtifact savedArtifact = dao.saveArtifact( artifact );
-            assertNotNull( savedArtifact );
-
-            //POM
-            artifact = dao.createArtifact( groupId, artifactId, version, "", "pom" );
-            assertNotNull( artifact );
-            artifact.getModel().setLastModified( new Date() );
-            artifact.getModel().setOrigin( "test" );
-            savedArtifact = dao.saveArtifact( artifact );
-            assertNotNull( savedArtifact );
-        }
-    }
-
     protected void assertDeleted( String path )
     {
         assertFalse( "File should have been deleted: " + path, new File( path ).exists() );

@@ -230,15 +143,9 @@ protected String prepareTestRepos()
         return testDir.getAbsolutePath();
     }

-    protected void populateDbForTestOrderOfDeletion()
-        throws Exception
+    protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
     {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "1.1.2-20070427.065136-1" );
-        versions.add( "1.1.2-20070506.163513-2" );
-        versions.add( "1.1.2-20070615.105019-3" );
-
-        populateDb( "org.apache.maven.plugins", "maven-assembly-plugin", versions );
+        return new ArchivaArtifact( groupId, artifactId, version, null, type );
     }
 }
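With the JDO/JPOX fixture gone, the test base class only has to provide an EasyMock 1.x MockControl for RepositoryListener; each test records the notifications it expects, replays, runs the purge, and verifies. A condensed sketch of that flow, assuming it sits in a subclass of AbstractRepositoryPurgeTest whose setUp assigned repoPurge (SOME_TEST_PATH is a placeholder for one of the PATH_TO_* constants):

    public void testNotifiesListenersForPurgedArtifacts()
        throws Exception
    {
        // record phase: invoke each expected callback once on the mock
        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-plugin-plugin",
                                                                  "2.3-SNAPSHOT", "maven-plugin" ) );
        listenerControl.replay();

        // exercise the purge under test
        repoPurge.process( SOME_TEST_PATH );

        // fails if the recorded notifications did not all happen
        listenerControl.verify();
    }

MockControl.createControl checks which calls occur but not their order; MockControl.createStrictControl could be used instead if the order of deleteArtifact notifications mattered.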

CleanupReleasedSnapshotsRepositoryPurgeTest.java

@@ -19,21 +19,18 @@
  * under the License.
  */

+import java.io.File;
+import java.util.Collections;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;
 import org.apache.maven.archiva.configuration.Configuration;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
 import org.apache.maven.archiva.repository.RepositoryContentFactory;
+import org.apache.maven.archiva.repository.events.RepositoryListener;
 import org.apache.maven.archiva.repository.metadata.MetadataTools;
 import org.custommonkey.xmlunit.XMLAssert;
+import org.easymock.MockControl;

-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;

 /**
  * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>

@@ -42,6 +39,8 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
     extends AbstractRepositoryPurgeTest
 {
     private ArchivaConfiguration archivaConfiguration;

+    private MockControl listenerControl;
+
     public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
         "org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";

@@ -49,40 +48,45 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
     public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";

     public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";

+    private RepositoryListener listener;
+
     protected void setUp()
         throws Exception
     {
         super.setUp();

-        Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>();
-        map.put( "filecontent", new LuceneRepositoryContentIndexStub() );
-        map.put( "hashcodes", new LuceneRepositoryContentIndexStub() );
-        map.put( "bytecode", new LuceneRepositoryContentIndexStub() );
-
         MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
         RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class, "cleanup-released-snapshots");

         archivaConfiguration =
             (ArchivaConfiguration) lookup( ArchivaConfiguration.class, "cleanup-released-snapshots" );

+        listenerControl = MockControl.createControl( RepositoryListener.class );
+
+        listener = (RepositoryListener) listenerControl.getMock();
+
         repoPurge =
-            new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), dao, metadataTools, map, archivaConfiguration, factory );
+            new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration, factory,
+                                                         Collections.singletonList( listener ) );
     }

     public void testReleasedSnapshotsExistsInSameRepo()
         throws Exception
     {
         Configuration config = archivaConfiguration.getConfiguration();
         config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
         config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );

-        populateReleasedSnapshotsTest();
-
         String repoRoot = prepareTestRepos();

+        // test listeners for the correct artifacts
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-plugin-plugin",
+                                                                  "2.3-SNAPSHOT", "maven-plugin" ) );
+        listenerControl.replay();
+
         repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );

+        listenerControl.verify();
+
         String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";

@@ -130,12 +134,18 @@ public void testReleasedSnapshotsExistsInDifferentRepo()
         config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
         config.addManagedRepository( getRepoConfiguration( RELEASES_TEST_REPO_ID, RELEASES_TEST_REPO_NAME ) );

-        populateReleasedSnapshotsTestInDiffRepo();
-
         String repoRoot = prepareTestRepos();

+        // test listeners for the correct artifacts
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.archiva",
+                                                                  "released-artifact-in-diff-repo", "1.0-SNAPSHOT",
+                                                                  "jar" ) );
+        listenerControl.replay();
+
         repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO );

+        listenerControl.verify();
+
         String projectRoot = repoRoot + "/org/apache/archiva/released-artifact-in-diff-repo";

         // check if the snapshot was removed

@@ -168,11 +178,14 @@ public void testHigherSnapshotExistsInSameRepo()
         config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
         config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );

-        populateHigherSnapshotExistsTest();
-
         String repoRoot = prepareTestRepos();

+        // test listeners for the correct artifacts - no deletions
+        listenerControl.replay();
+
         repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO );

+        listenerControl.verify();
+
         String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-source-plugin";

@@ -207,32 +220,4 @@ public void testHigherSnapshotExistsInSameRepo()
                                           "//metadata/versioning/versions/version", metadataXml );
         XMLAssert.assertXpathEvaluatesTo( "20070427033345", "//metadata/versioning/lastUpdated", metadataXml );
     }
-
-    private void populateReleasedSnapshotsTest()
-        throws ArchivaDatabaseException
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "2.3-SNAPSHOT" );
-
-        populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
-    }
-
-    private void populateHigherSnapshotExistsTest()
-        throws Exception
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "2.0.3-SNAPSHOT" );
-
-        populateDb( "org.apache.maven.plugins", "maven-source-plugin", versions );
-    }
-
-    private void populateReleasedSnapshotsTestInDiffRepo()
-        throws ArchivaDatabaseException
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "1.0-SNAPSHOT" );
-
-        populateDb( "org.apache.archiva", "released-artifact-in-diff-repo", versions );
-    }
 }

DaysOldRepositoryPurgeTest.java

@@ -19,17 +19,14 @@
  * under the License.
  */

-import org.apache.commons.lang.time.DateUtils;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
-import org.apache.maven.archiva.indexer.RepositoryContentIndex;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Calendar;
-import java.util.HashMap;
+import java.util.Collections;
 import java.util.List;
-import java.util.Map;

+import org.apache.commons.lang.time.DateUtils;
+
 /**
  * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>

@@ -37,9 +34,6 @@
 public class DaysOldRepositoryPurgeTest
     extends AbstractRepositoryPurgeTest
 {
-    private Map<String, RepositoryContentIndex> map;
-
     private static final String[] extensions =
         new String[] { "-5.jar", "-5.pom", "-6.jar", "-6.pom", "-7.jar", "-7.pom" };

@@ -55,12 +49,6 @@ public class DaysOldRepositoryPurgeTest
     private String sec;

-    protected void setUp()
-        throws Exception
-    {
-        super.setUp();
-    }
-
     private void setLastModified( String dirPath, long lastModified )
     {
         File dir = new File( dirPath );

@@ -74,14 +62,11 @@ private void setLastModified( String dirPath, long lastModified )
     public void testByLastModified()
         throws Exception
     {
-        map = new HashMap<String, RepositoryContentIndex>();
-        map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
         repoPurge =
-            new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
-                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+            new DaysOldRepositoryPurge( getRepository(),
                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+                                        Collections.singletonList( listener ) );

         String repoRoot = prepareTestRepos();

@@ -89,9 +74,16 @@ public void testByLastModified()
         setLastModified( projectRoot + "/2.2-SNAPSHOT/", 1179382029 );

-        populateDbForTestByLastModified();
+        // test listeners for the correct artifacts
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+                                                                  "2.2-SNAPSHOT", "maven-plugin" ) );
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-install-plugin",
+                                                                  "2.2-SNAPSHOT", "pom" ) );
+        listenerControl.replay();

         repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );

+        listenerControl.verify();
+
         assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar" );
         assertDeleted( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-SNAPSHOT.jar.md5" );

@@ -119,14 +111,10 @@ public void testByLastModified()
     public void testOrderOfDeletion()
         throws Exception
     {
-        map = new HashMap<String, RepositoryContentIndex>();
-        map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
         repoPurge =
-            new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
-                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+            new DaysOldRepositoryPurge( getRepository(), getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+                                        Collections.singletonList( listener ) );

         String repoRoot = prepareTestRepos();

@@ -134,10 +122,17 @@ public void testOrderOfDeletion()
         setLastModified( projectRoot + "/1.1.2-SNAPSHOT/", 1179382029 );

-        populateDbForTestOrderOfDeletion();
+        // test listeners for the correct artifacts
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+                                                                  "1.1.2-20070427.065136-1", "maven-plugin" ) );
+        listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
+                                                                  "1.1.2-20070427.065136-1", "pom" ) );
+        listenerControl.replay();

         repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );

+        listenerControl.verify();
+
         assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
         assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.sha1" );
         assertDeleted( projectRoot + "/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.md5" );

@@ -164,14 +159,11 @@ public void testOrderOfDeletion()
     public void testMetadataDrivenSnapshots()
         throws Exception
     {
-        map = new HashMap<String, RepositoryContentIndex>();
-        map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) );
-        map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) );
-
         repoPurge =
-            new DaysOldRepositoryPurge( getRepository(), dao, getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
-                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
+            new DaysOldRepositoryPurge( getRepository(),
+                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
+                                        getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
+                                        Collections.singletonList( listener ) );

         String repoRoot = prepareTestRepos();

@@ -221,10 +213,17 @@ public void testMetadataDrivenSnapshots()
         versions.add( "1.4.3-" + year + mon + day + "." + hr + min + sec + "-7" );
         versions.add( "1.4.3-SNAPSHOT" );

-        populateDb( "org.codehaus.plexus", "plexus-utils", versions );
+        // test listeners for the correct artifacts
+        listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+                                                                  "1.4.3-20070113.163208-4", "jar" ) );
+        listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.plexus", "plexus-utils",
+                                                                  "1.4.3-20070113.163208-4", "pom" ) );
+        listenerControl.replay();

         repoPurge.process( PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT );

+        listenerControl.verify();
+
         // this should be deleted since the filename version (timestamp) is older than
         // 100 days even if the last modified date was <100 days ago
         assertDeleted( versionRoot + "/plexus-utils-1.4.3-20070113.163208-4.jar" );

@@ -260,15 +259,4 @@ protected void tearDown()
         super.tearDown();
         repoPurge = null;
     }
-
-    private void populateDbForTestByLastModified()
-        throws Exception
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "2.2-20061118.060401-2" );
-        versions.add( "2.2-20070513.034619-5" );
-        versions.add( "2.2-SNAPSHOT" );
-
-        populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
-    }
 }

RepositoryPurgeConsumerTest.java

@@ -19,6 +19,8 @@
  * under the License.
  */

+import java.io.File;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.maven.archiva.common.utils.BaseFile;
 import org.apache.maven.archiva.configuration.ArchivaConfiguration;

@@ -27,15 +29,9 @@
 import org.apache.maven.archiva.configuration.FileTypes;
 import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
 import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
-import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub;
-import org.apache.maven.archiva.database.ArchivaDatabaseException;
 import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
 import org.custommonkey.xmlunit.XMLAssert;

-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
 /**
  * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
 */

@@ -99,13 +95,6 @@ public void testConsumerByRetentionCount()
             (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
                                                      "repo-purge-consumer-by-retention-count" );

-        LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
-        indexFactory.setExpectedRecordsSize( 2 );
-
-        ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
-        populateDbForRetentionCountTest();
-
         ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
         repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
         repoConfiguration.setRetentionCount( TEST_RETENTION_COUNT );

@@ -163,17 +152,10 @@ private void addRepoToConfiguration( String configHint, ManagedRepositoryConfiguration repoConfiguration )
     public void testConsumerByDaysOld()
         throws Exception
     {
-        populateDbForDaysOldTest();
-
         KnownRepositoryContentConsumer repoPurgeConsumer =
             (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
                                                      "repo-purge-consumer-by-days-old" );

-        LuceneRepositoryContentIndexFactoryStub indexFactory = new LuceneRepositoryContentIndexFactoryStub();
-        indexFactory.setExpectedRecordsSize( 2 );
-
-        ( (RepositoryPurgeConsumer) repoPurgeConsumer ).setRepositoryContentIndexFactory( indexFactory );
-
         ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
         repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
         addRepoToConfiguration( "days-old", repoConfiguration );

@@ -222,8 +204,6 @@ public void testReleasedSnapshotsWereNotCleaned()
             (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
                                                      "repo-purge-consumer-by-retention-count" );

-        populateDbForReleasedSnapshotsTest();
-
         ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
         repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
         addRepoToConfiguration( "retention-count", repoConfiguration );

@@ -265,8 +245,6 @@ public void testReleasedSnapshotsWereCleaned()
             (KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
                                                      "repo-purge-consumer-by-days-old" );

-        populateDbForReleasedSnapshotsTest();
-
         ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
         repoConfiguration.setDeleteReleasedSnapshots( true );
         addRepoToConfiguration( "days-old", repoConfiguration );

@@ -301,34 +279,4 @@ public void testReleasedSnapshotsWereCleaned()
                                           "//metadata/versioning/versions/version", metadataXml );
         XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
     }
-
-    public void populateDbForRetentionCountTest()
-        throws ArchivaDatabaseException
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "1.0RC1-20070504.153317-1" );
-        versions.add( "1.0RC1-20070504.160758-2" );
-        versions.add( "1.0RC1-20070505.090015-3" );
-        versions.add( "1.0RC1-20070506.090132-4" );
-
-        populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
-    }
-
-    private void populateDbForDaysOldTest()
-        throws ArchivaDatabaseException
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "2.2-SNAPSHOT" );
-
-        populateDb( "org.apache.maven.plugins", "maven-install-plugin", versions );
-    }
-
-    public void populateDbForReleasedSnapshotsTest()
-        throws ArchivaDatabaseException
-    {
-        List<String> versions = new ArrayList<String>();
-        versions.add( "2.3-SNAPSHOT" );
-
-        populateDb( "org.apache.maven.plugins", "maven-plugin-plugin", versions );
-    }
 }

RetentionCountRepositoryPurgeTest.java

@ -1,5 +1,7 @@
package org.apache.maven.archiva.consumers.core.repository; package org.apache.maven.archiva.consumers.core.repository;
import java.util.Collections;
/* /*
* Licensed to the Apache Software Foundation (ASF) under one * Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file * or more contributor license agreements. See the NOTICE file
@ -19,14 +21,6 @@
* under the License. * under the License.
*/ */
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexStub;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
/** /**
* Test RetentionsCountRepositoryPurgeTest * Test RetentionsCountRepositoryPurgeTest
* *
@ -41,13 +35,11 @@ protected void setUp()
{ {
super.setUp(); super.setUp();
Map<String, RepositoryContentIndex> map = new HashMap<String, RepositoryContentIndex>(); repoPurge =
map.put( "filecontent", new LuceneRepositoryContentIndexStub( 2 ) ); new RetentionCountRepositoryPurge(
map.put( "hashcodes", new LuceneRepositoryContentIndexStub( 2 ) ); getRepository(),
map.put( "bytecode", new LuceneRepositoryContentIndexStub( 2 ) ); getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
Collections.singletonList( listener ) );
repoPurge = new RetentionCountRepositoryPurge( getRepository(), dao,
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(), map );
} }
/** /**
@ -58,12 +50,23 @@ protected void setUp()
public void testIfAJarWasFound() public void testIfAJarWasFound()
throws Exception throws Exception
{ {
populateIfJarWasFoundDb();
String repoRoot = prepareTestRepos(); String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.153317-1", "jar" ) );
listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.153317-1", "pom" ) );
listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.160758-2", "jar" ) );
listener.deleteArtifact( getRepository(), createArtifact( "org.jruby.plugins", "jruby-rake-plugin",
"1.0RC1-20070504.160758-2", "pom" ) );
listenerControl.replay();
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT ); repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
listenerControl.verify();
String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT"; String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT";
// assert if removed from repo // assert if removed from repo
@ -105,11 +108,18 @@ public void testIfAJarWasFound()
public void testIfAPomWasFound()
throws Exception
{
populateIfPomWasFoundDb();
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
"1.1.2-20070427.065136-1", "jar" ) );
listener.deleteArtifact( getRepository(), createArtifact( "org.codehaus.castor", "castor-anttasks",
"1.1.2-20070427.065136-1", "pom" ) );
listenerControl.replay();
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
listenerControl.verify();
String versionRoot = repoRoot + "/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT";
@ -146,12 +156,19 @@ public void testIfAPomWasFound()
public void testOrderOfDeletion()
throws Exception
{
populateDbForTestOrderOfDeletion();
String repoRoot = prepareTestRepos();
// test listeners for the correct artifacts
listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
"1.1.2-20070427.065136-1", "maven-plugin" ) );
listener.deleteArtifact( getRepository(), createArtifact( "org.apache.maven.plugins", "maven-assembly-plugin",
"1.1.2-20070427.065136-1", "pom" ) );
listenerControl.replay();
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
listenerControl.verify();
String versionRoot = repoRoot +
"/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
@ -177,27 +194,4 @@ public void testOrderOfDeletion()
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.sha1" );
assertExists( versionRoot + "/maven-assembly-plugin-1.1.2-20070615.105019-3.pom.md5" );
}
public void populateIfJarWasFoundDb()
throws Exception
{
List<String> versions = new ArrayList<String>();
versions.add( "1.0RC1-20070504.153317-1" );
versions.add( "1.0RC1-20070504.160758-2" );
versions.add( "1.0RC1-20070505.090015-3" );
versions.add( "1.0RC1-20070506.090132-4" );
populateDb( "org.jruby.plugins", "jruby-rake-plugin", versions );
}
public void populateIfPomWasFoundDb()
throws Exception
{
List<String> versions = new ArrayList<String>();
versions.add( "1.1.2-20070427.065136-1" );
versions.add( "1.1.2-20070615.105019-3" );
versions.add( "1.1.2-20070506.163513-2" );
populateDb( "org.codehaus.castor", "castor-anttasks", versions );
}
}
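
The retention-count tests above all follow the same record/replay/verify cycle on `listenerControl` and `listener`, which the shared test base presumably creates with EasyMock's `MockControl` (the class name `ListenerMockSketch` and the null arguments below are illustrative assumptions, not part of the commit):

```java
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.easymock.MockControl;

// Hypothetical distillation of the mock lifecycle used by the purge tests.
public class ListenerMockSketch
{
    public static void main( String[] args )
    {
        MockControl listenerControl = MockControl.createControl( RepositoryListener.class );
        RepositoryListener listener = (RepositoryListener) listenerControl.getMock();

        // record phase: every call made now becomes an expectation
        listener.deleteArtifact( null, null );

        // switch to replay mode before exercising the code under test
        listenerControl.replay();

        // in the real tests this happens inside repoPurge.process( ... )
        listener.deleteArtifact( null, null );

        // fails if any expected call was not made
        listenerControl.verify();
    }
}
```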

View File

@ -1,60 +0,0 @@
package org.apache.maven.archiva.consumers.core.repository.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
/**
* LuceneRepositoryContentIndexFactoryStub
*
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public class LuceneRepositoryContentIndexFactoryStub
implements RepositoryContentIndexFactory
{
private int expectedRecordsSize = 0;
public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
}
public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
}
public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
{
// TODO Auto-generated method stub
return new LuceneRepositoryContentIndexStub( expectedRecordsSize );
}
public void setExpectedRecordsSize( int size )
{
expectedRecordsSize = size;
}
}

View File

@ -1,147 +0,0 @@
package org.apache.maven.archiva.consumers.core.repository.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Collection;
import junit.framework.Assert;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
/**
* @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
* @version
*/
public class LuceneRepositoryContentIndexStub
implements RepositoryContentIndex
{
private int expectedRecordsSize;
public LuceneRepositoryContentIndexStub()
{
}
public LuceneRepositoryContentIndexStub( int size )
{
expectedRecordsSize = size;
}
public void deleteRecords( Collection records )
throws RepositoryIndexException
{
Assert.assertEquals( expectedRecordsSize, records.size() );
}
public boolean exists()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return false;
}
public Collection getAllRecordKeys()
throws RepositoryIndexException
{
// TODO Auto-generated method stub
return null;
}
public Analyzer getAnalyzer()
{
// TODO Auto-generated method stub
return null;
}
public LuceneEntryConverter getEntryConverter()
{
// TODO Auto-generated method stub
return null;
}
public String getId()
{
// TODO Auto-generated method stub
return null;
}
public File getIndexDirectory()
{
// TODO Auto-generated method stub
return null;
}
public QueryParser getQueryParser()
{
// TODO Auto-generated method stub
return null;
}
public ManagedRepositoryConfiguration getRepository()
{
// TODO Auto-generated method stub
return null;
}
public Searchable getSearchable()
throws RepositoryIndexSearchException
{
// TODO Auto-generated method stub
return null;
}
public void indexRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
public void modifyRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
public void modifyRecords( Collection records )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
public void deleteRecord( LuceneRepositoryContentRecord record )
throws RepositoryIndexException
{
// TODO Auto-generated method stub
}
}

View File

@ -31,10 +31,6 @@
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>retention-count</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>retention-count</role-hint>
@ -46,11 +42,6 @@
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>retention-count</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<field-name>indexFactory</field-name>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@ -123,10 +114,6 @@
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>days-old</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<role-hint>days-old</role-hint>
@ -138,11 +125,6 @@
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>days-old</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<field-name>indexFactory</field-name>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@ -203,106 +185,5 @@
</requirement>
</requirements>
</component>
<!-- DAOs -->
<component>
<role>org.apache.maven.archiva.database.ArchivaDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArchivaDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.ArtifactDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoArtifactDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.ProjectModelDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoProjectModelDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.apache.maven.archiva.database.RepositoryProblemDAO</role>
<role-hint>jdo</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoRepositoryProblemDAO</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JdoAccess -->
<component>
<role>org.apache.maven.archiva.database.jdo.JdoAccess</role>
<role-hint>archiva</role-hint>
<implementation>org.apache.maven.archiva.database.jdo.JdoAccess</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
</requirement>
</requirements>
</component>
<!-- JDO Factory -->
<component>
<role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint>
<implementation>org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory</implementation>
<configuration>
<persistenceManagerFactoryClass>org.jpox.PersistenceManagerFactoryImpl</persistenceManagerFactoryClass>
<driverName>org.hsqldb.jdbcDriver</driverName>
<userName>sa</userName>
<password></password>
<url>jdbc:hsqldb:mem:testdb</url>
<otherProperties>
<property>
<name>javax.jdo.PersistenceManagerFactoryClass</name>
<value>org.jpox.PersistenceManagerFactoryImpl</value>
</property>
</otherProperties>
</configuration>
</component>
<!-- LuceneRepositoryIndexFactory -->
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.consumers.core.repository.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
</component>
</components>
</component-set>

View File

@ -0,0 +1,70 @@
package org.apache.maven.archiva.indexer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
/**
* Process repository management events and respond appropriately.
*
* @todo creating index instances every time is inefficient; the plugin needs a repository context to operate in
* @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="indexer"
*/
public class RepositoryContentIndexEventListener
implements RepositoryListener
{
/**
* @plexus.requirement role-hint="lucene"
*/
private RepositoryContentIndexFactory indexFactory;
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
{
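// remove this artifact's entries from each of the three content indexes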
RepositoryContentIndex index = indexFactory.createFileContentIndex( repository.getRepository() );
FileContentRecord fileContentRecord = new FileContentRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
fileContentRecord.setFilename( repository.toPath( artifact ) );
index.deleteRecord( fileContentRecord );
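// hashcodes index: delete the record corresponding to this artifact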
index = indexFactory.createHashcodeIndex( repository.getRepository() );
HashcodesRecord hashcodesRecord = new HashcodesRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
hashcodesRecord.setArtifact( artifact );
index.deleteRecord( hashcodesRecord );
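// bytecode index: delete the record corresponding to this artifact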
index = indexFactory.createBytecodeIndex( repository.getRepository() );
BytecodeRecord bytecodeRecord = new BytecodeRecord();
fileContentRecord.setRepositoryId( repository.getRepository().getId() );
bytecodeRecord.setArtifact( artifact );
index.deleteRecord( bytecodeRecord );
}
catch ( RepositoryIndexException e )
{
// Ignore
}
}
}

View File

@ -0,0 +1,167 @@
package org.apache.maven.archiva.indexer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.search.Hits;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Searcher;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.indexer.search.BytecodeIndexPopulator;
import org.apache.maven.archiva.indexer.search.FileContentIndexPopulator;
import org.apache.maven.archiva.indexer.search.HashcodesIndexPopulator;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
public class RepositoryContentIndexEventListenerTest
extends PlexusInSpringTestCase
{
private static final String TEST_DEFAULT_REPOSITORY_NAME = "Test Default Repository";
private static final String TEST_DEFAULT_REPO_ID = "test-repo";
private RepositoryListener listener;
@Override
protected void setUp()
throws Exception
{
super.setUp();
listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "indexer" );
}
public void testWiring()
{
List<RepositoryListener> listeners =
PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
getApplicationContext() );
assertEquals( 1, listeners.size() );
assertEquals( listener, listeners.get( 0 ) );
}
public ArchivaArtifact createArtifact( String artifactId, String version )
{
ArchivaArtifact artifact =
new ArchivaArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}
public void testDeleteArtifact()
throws Exception
{
RepositoryContentIndexFactory indexFactory =
(RepositoryContentIndexFactory) lookup( RepositoryContentIndexFactory.class.getName(), "lucene" );
File repoDir = new File( getBasedir(), "src/test/managed-repository" );
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
ManagedRepositoryConfiguration repository =
createRepository( TEST_DEFAULT_REPO_ID, TEST_DEFAULT_REPOSITORY_NAME, repoDir );
File indexLocation = new File( "target/index-events-" + getName() + "/" );
MockConfiguration config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( TEST_DEFAULT_REPO_ID );
repoConfig.setName( TEST_DEFAULT_REPOSITORY_NAME );
repoConfig.setLocation( repoDir.getAbsolutePath() );
repoConfig.setIndexDir( indexLocation.getAbsolutePath() );
repoConfig.setScanned( true );
if ( indexLocation.exists() )
{
FileUtils.deleteDirectory( indexLocation );
}
config.getConfiguration().addManagedRepository( repoConfig );
// Create the (empty) indexes.
RepositoryContentIndex indexHashcode = indexFactory.createHashcodeIndex( repository );
RepositoryContentIndex indexBytecode = indexFactory.createBytecodeIndex( repository );
RepositoryContentIndex indexContents = indexFactory.createFileContentIndex( repository );
// Now populate them.
Map<String, HashcodesRecord> hashcodesMap = new HashcodesIndexPopulator().populate( new File( getBasedir() ) );
indexHashcode.indexRecords( hashcodesMap.values() );
assertEquals( "Hashcode Key Count", hashcodesMap.size(), indexHashcode.getAllRecordKeys().size() );
assertRecordCount( indexHashcode, hashcodesMap.size() );
Map<String, BytecodeRecord> bytecodeMap = new BytecodeIndexPopulator().populate( new File( getBasedir() ) );
indexBytecode.indexRecords( bytecodeMap.values() );
assertEquals( "Bytecode Key Count", bytecodeMap.size(), indexBytecode.getAllRecordKeys().size() );
assertRecordCount( indexBytecode, bytecodeMap.size() );
Map<String, FileContentRecord> contentMap = new FileContentIndexPopulator().populate( new File( getBasedir() ) );
indexContents.indexRecords( contentMap.values() );
assertEquals( "File Content Key Count", contentMap.size(), indexContents.getAllRecordKeys().size() );
assertRecordCount( indexContents, contentMap.size() );
ManagedRepositoryContent repositoryContent =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
repositoryContent.setRepository( repository );
ArchivaArtifact artifact =
new ArchivaArtifact( "org.apache.maven.archiva", "archiva-common", "1.0", "", "jar" );
listener.deleteArtifact( repositoryContent, artifact );
artifact =
new ArchivaArtifact( "org.apache.maven.archiva.record", "test-pom", "1.0", "", "pom" );
listener.deleteArtifact( repositoryContent, artifact );
assertRecordCount( indexHashcode, hashcodesMap.size() - 1 );
assertRecordCount( indexBytecode, bytecodeMap.size() - 1 );
assertRecordCount( indexContents, contentMap.size() - 1 );
}
protected ManagedRepositoryConfiguration createRepository( String id, String name, File location )
{
ManagedRepositoryConfiguration repo = new ManagedRepositoryConfiguration();
repo.setId( id );
repo.setName( name );
repo.setLocation( location.getAbsolutePath() );
return repo;
}
private void assertRecordCount( RepositoryContentIndex index, int expectedCount )
throws Exception
{
Query query = new MatchAllDocsQuery();
Searcher searcher = (Searcher) index.getSearchable();
Hits hits = searcher.search( query );
assertEquals( "Expected Record Count for " + index.getId(), expectedCount, hits.length() );
}
}

View File

@ -0,0 +1,22 @@
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.indexer.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
<role-hint>lucene</role-hint>
<implementation>org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndexFactory</implementation>
<description>Factory for Lucene repository content index instances.</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>configuration</field-name>
</requirement>
</requirements>
</component>
</components>
</component-set>

View File

@ -0,0 +1,37 @@
package org.apache.maven.archiva.repository.events;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Listen to events on the repository. This interface is a stopgap
* refactoring measure until an event bus is in place to handle
* generic events such as these.
*/
public interface RepositoryListener
{
/**
* Event for the deletion of a given artifact.
*
* @param repository the repository content the artifact was deleted from.
* @param artifact the artifact that was deleted.
*/
void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact );
}
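
For context, a minimal sketch of how a component holding a `List<RepositoryListener>` would fan an artifact deletion out to every registered listener; the class and method names here are illustrative only and are not part of this commit:

```java
import java.util.List;

import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;

// Illustrative helper only: shows the calling convention expected of RepositoryListener.
class RepositoryListenerNotifier
{
    private final ManagedRepositoryContent repository;

    private final List<RepositoryListener> listeners;

    RepositoryListenerNotifier( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
    {
        this.repository = repository;
        this.listeners = listeners;
    }

    // called once per artifact that has been physically removed from the repository
    void artifactDeleted( ArchivaArtifact artifact )
    {
        for ( RepositoryListener listener : listeners )
        {
            listener.deleteArtifact( repository, artifact );
        }
    }
}
```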

View File

@ -74,10 +74,6 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
</dependency>
<!-- TEST DEPS -->
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>

View File

@ -0,0 +1,55 @@
package org.apache.maven.archiva.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
/**
* Process repository management events and respond appropriately.
*
* @plexus.component role="org.apache.maven.archiva.repository.events.RepositoryListener" role-hint="database"
*/
public class RepositoryDatabaseEventListener
implements RepositoryListener
{
/**
* @plexus.requirement role-hint="jdo"
*/
private ArtifactDAO artifactDAO;
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
{
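// fetch the persisted artifact matching the one being deleted, then remove it from the database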
ArchivaArtifact queriedArtifact =
artifactDAO.getArtifact( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
artifact.getClassifier(), artifact.getType() );
artifactDAO.deleteArtifact( queriedArtifact );
}
catch ( ArchivaDatabaseException e )
{
// ignored
}
// TODO [MRM-37]: re-run the database consumers to clean up
}
}

View File

@ -0,0 +1,90 @@
package org.apache.maven.archiva.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Date;
import java.util.List;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.spring.PlexusToSpringUtils;
public class RepositoryDatabaseEventListenerTest
extends AbstractArchivaDatabaseTestCase
{
private RepositoryListener listener;
@Override
protected void setUp()
throws Exception
{
super.setUp();
listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "database" );
}
public void testWiring()
{
List<RepositoryListener> listeners =
PlexusToSpringUtils.lookupList( PlexusToSpringUtils.buildSpringId( RepositoryListener.class ),
getApplicationContext() );
assertEquals( 1, listeners.size() );
assertEquals( listener, listeners.get( 0 ) );
}
public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
}
public void testDeleteArtifact()
throws Exception
{
ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
// Setup artifacts in fresh DB.
ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao );
artifactDao.saveArtifact( artifact );
assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
"jar" ) );
artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
ManagedRepositoryContent repository =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
listener.deleteArtifact( repository, artifact );
try
{
artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
fail( "Should not find artifact" );
}
catch ( ObjectNotFoundException e )
{
assertTrue( true );
}
}
}

View File

@ -18,6 +18,7 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-model</artifactId>
</dependency>
<!-- TODO: replace with metadata processor -->
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-database</artifactId>

View File

@ -171,11 +171,6 @@
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</dependency>
<dependency>
<groupId>org.apache.derby</groupId>
<artifactId>derby</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.mail</groupId>
<artifactId>mail</artifactId>

View File

@ -0,0 +1,28 @@
Stage 1: remove use of database and index from core consumers (move implementation into respective database and index modules)
Done!
Stage 2: separate model from JPOX annotated classes, centralising JPOX use in database
* archiva-model to be reviewed, possibly split into a basic model with extensible parts. See metadata proposal
* add consumer to generate Archiva metadata at same time as database model
Stage 3: add a basic repository querying API for base artifact information and retrieval of metadata
* RSS, browse
* consider repository-api refactorings
* at this point it should be possible to run a functional Archiva without a database
* note that metadata need not be stored with the artifacts themselves, but will be by default
Stage 4: incorporation of event API (a rough sketch follows after this list)
* used to centralise the arrival, removal, etc. of files/artifacts in the repository
* errors should be events as well, so they get meaningful handling and reporting instead of bare exceptions in the logs
Stage 5: isolate scanning code
* the repository should operate without the scanning code; when enabled, scanning should push events
* better assessment of its progress and performance
* removal of database / repository scanning duality - all operations are driven by the event bus
* move some database operations to a housekeeping scheduled task (same for index), make scheduled tasks a listable item based on available plugins
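
A rough sketch of the kind of generic event bus Stage 4 describes; none of these types exist yet, the names are placeholders rather than an agreed design, and RepositoryListener above is the stopgap such a bus would eventually replace:

```java
package org.apache.maven.archiva.repository.events;

// Placeholder sketch only: one possible shape for the Stage 4 event bus.
public interface RepositoryEventBus
{
    // publish an event describing something that happened to a file/artifact in a repository
    void publish( RepositoryEvent event );

    // register a handler interested in a given type of event (arrival, removal, error, ...)
    <T extends RepositoryEvent> void subscribe( Class<T> eventType, RepositoryEventHandler<T> handler );

    interface RepositoryEvent
    {
        String getRepositoryId();
    }

    interface RepositoryEventHandler<T extends RepositoryEvent>
    {
        void handle( T event );
    }
}
```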