-merged -r637928 of trunk for MRM-622

git-svn-id: https://svn.apache.org/repos/asf/archiva/branches@642954 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Maria Odea B. Ching 2008-03-31 10:30:30 +00:00
parent cff6df20a4
commit fa9c843f24
4 changed files with 153 additions and 48 deletions

View File

@ -30,6 +30,7 @@
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.codehaus.plexus.cache.Cache;
import java.util.List;
import java.io.File;
@ -64,12 +65,17 @@ public class DatabaseCleanupRemoveProjectConsumer
* @plexus.requirement role-hint="jdo"
*/
private ProjectModelDAO projectModelDAO;
/**
* @plexus.requirement
*/
private RepositoryContentFactory repositoryFactory;
/**
* @plexus.requirement role-hint="effective-project-cache"
*/
private Cache effectiveProjectCache;
public void beginScan()
{
// TODO Auto-generated method stub
@ -81,44 +87,54 @@ public void completeScan()
}
public List<String> getIncludedTypes()
{
return null;
}
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
{
if ( !StringUtils.equals( "pom", artifact.getType() ) )
{
// Not a pom. Skip it.
return;
}
try
{
ManagedRepositoryContent repositoryContent =
repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
if ( !file.exists() )
{
ArchivaProjectModel projectModel =
projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion() );
projectModelDAO.deleteProjectModel( projectModel );
// Force removal of project model from effective cache
String projectKey = toProjectKey( projectModel );
synchronized ( effectiveProjectCache )
{
if ( effectiveProjectCache.hasKey( projectKey ) )
{
effectiveProjectCache.remove( projectKey );
}
}
}
}
catch ( RepositoryException re )
{
throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " + re.getMessage() );
}
catch ( ArchivaDatabaseException e )
{
throw new ConsumerException( e.getMessage() );
}
}
}
public String getDescription()
@ -134,16 +150,31 @@ public String getId()
public boolean isPermanent()
{
return false;
}
public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
{
this.projectModelDAO = projectModelDAO;
}
public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
{
this.repositoryFactory = repositoryFactory;
}
public void setEffectiveProjectCache( Cache effectiveProjectCache )
{
this.effectiveProjectCache = effectiveProjectCache;
}
private String toProjectKey( ArchivaProjectModel project )
{
StringBuilder key = new StringBuilder();
key.append( project.getGroupId() ).append( ":" );
key.append( project.getArtifactId() ).append( ":" );
key.append( project.getVersion() );
return key.toString();
}
}
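
For readers wiring the consumer outside Plexus, the setters added in this file are all that is needed. The sketch below is illustrative only and not part of the commit; the DAO, repository factory, cache, and artifact arguments are placeholders supplied by the caller.

import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.codehaus.plexus.cache.Cache;

public class CleanupConsumerWiringSketch
{
    // Hypothetical helper: wires the consumer the same way the unit test below does,
    // then processes a single artifact. When the artifact's pom no longer exists on
    // disk, the consumer deletes the project model and evicts its cache entry.
    public static void cleanUp( ProjectModelDAO projectModelDAO, RepositoryContentFactory repositoryFactory,
                                Cache effectiveProjectCache, ArchivaArtifact artifact )
        throws Exception
    {
        DatabaseCleanupRemoveProjectConsumer consumer = new DatabaseCleanupRemoveProjectConsumer();
        consumer.setProjectModelDAO( projectModelDAO );
        consumer.setRepositoryFactory( repositoryFactory );
        consumer.setEffectiveProjectCache( effectiveProjectCache );
        consumer.processArchivaArtifact( artifact );
    }
}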

View File

@ -41,6 +41,8 @@
import org.apache.maven.archiva.repository.project.ProjectModelReader;
import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
import org.codehaus.plexus.cache.Cache;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
@ -101,6 +103,11 @@ public class ProjectModelToDatabaseConsumer
private List<String> includes;
/**
* @plexus.requirement role-hint="effective-project-cache"
*/
private Cache effectiveProjectCache;
public ProjectModelToDatabaseConsumer()
{
includes = new ArrayList<String>();
@ -130,11 +137,15 @@ public void processArchivaArtifact( ArchivaArtifact artifact )
// Not a pom. Skip it.
return;
}
ArchivaProjectModel model = null;
// remove old project model if it already exists in the database
if ( ( model =
getProjectModelFromDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) ) != null )
{
removeOldProjectModel( model );
model = null;
}
ManagedRepositoryContent repo = getRepository( artifact );
@ -148,10 +159,10 @@ public void processArchivaArtifact( ArchivaArtifact artifact )
try
{
model = reader.read( artifactFile );
model.setOrigin( "filesystem" );
// The version should be updated to the artifact/filename version if it is a unique snapshot
if ( VersionUtil.isUniqueSnapshot( artifact.getVersion() ) )
{
@ -167,6 +178,7 @@ public void processArchivaArtifact( ArchivaArtifact artifact )
if ( isValidModel( model, repo, artifact ) )
{
getLogger().debug( "Adding project model to database - " + Keys.toKey( model ) );
dao.getProjectModelDAO().saveProjectModel( model );
}
else
@ -195,20 +207,20 @@ public void processArchivaArtifact( ArchivaArtifact artifact )
}
}
private ArchivaProjectModel getProjectModelFromDatabase( String groupId, String artifactId, String version )
{
try
{
ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
return model;
}
catch ( ObjectNotFoundException e )
{
return null;
}
catch ( ArchivaDatabaseException e )
{
return null;
}
}
@ -255,7 +267,7 @@ private boolean isValidModel( ArchivaProjectModel model, ManagedRepositoryConten
appendModel( emsg, model );
emsg.append( "]: The model artifactId [" ).append( model.getArtifactId() );
emsg.append( "] does not match the artifactId portion of the filename: " ).append( artifact.getArtifactId() );
getLogger().warn( emsg.toString() );
addProblem( artifact, emsg.toString() );
@ -271,7 +283,7 @@ private boolean isValidModel( ArchivaProjectModel model, ManagedRepositoryConten
appendModel( emsg, model );
emsg.append( "]; The model version [" ).append( model.getVersion() );
emsg.append( "] does not match the version portion of the filename: " ).append( artifact.getVersion() );
getLogger().warn( emsg.toString() );
addProblem( artifact, emsg.toString() );
@ -293,7 +305,7 @@ private void addProblem( ArchivaArtifact artifact, String msg )
throws ConsumerException
{
ManagedRepositoryContent repo = getRepository( artifact );
RepositoryProblem problem = new RepositoryProblem();
problem.setRepositoryId( artifact.getModel().getRepositoryId() );
problem.setPath( repo.toPath( artifact ) );
@ -316,4 +328,37 @@ private void addProblem( ArchivaArtifact artifact, String msg )
}
}
private String toProjectKey( ArchivaProjectModel project )
{
StringBuilder key = new StringBuilder();
key.append( project.getGroupId() ).append( ":" );
key.append( project.getArtifactId() ).append( ":" );
key.append( project.getVersion() );
return key.toString();
}
private void removeOldProjectModel( ArchivaProjectModel model )
{
try
{
dao.getProjectModelDAO().deleteProjectModel( model );
}
catch ( ArchivaDatabaseException ae )
{
getLogger().error( "Unable to delete existing project model." );
}
// Force removal of project model from effective cache
String projectKey = toProjectKey( model );
synchronized ( effectiveProjectCache )
{
if ( effectiveProjectCache.hasKey( projectKey ) )
{
effectiveProjectCache.remove( projectKey );
}
}
}
}
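
Both consumers now repeat the same guarded eviction: build the groupId:artifactId:version key, then synchronize on the cache and remove the entry if it is present. As a rough illustration only, not part of this commit, the pattern could be pulled into a small helper that uses the same hasKey()/remove() calls shown above.

import org.codehaus.plexus.cache.Cache;

public final class EffectiveProjectCacheUtil
{
    private EffectiveProjectCacheUtil()
    {
        // static helper only
    }

    // Evicts the cached effective project for the given coordinates, if present.
    public static void evict( Cache effectiveProjectCache, String groupId, String artifactId, String version )
    {
        String projectKey = groupId + ":" + artifactId + ":" + version;
        synchronized ( effectiveProjectCache )
        {
            if ( effectiveProjectCache.hasKey( projectKey ) )
            {
                effectiveProjectCache.remove( projectKey );
            }
        }
    }
}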

View File

@ -19,6 +19,7 @@
* under the License.
*/
import org.codehaus.plexus.cache.Cache;
import org.easymock.MockControl;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
@ -37,6 +38,8 @@ public class DatabaseCleanupRemoveProjectConsumerTest
private ProjectModelDAO projectModelDAOMock;
private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;
private Cache effectiveProjectCache;
public void setUp()
throws Exception
@ -47,11 +50,15 @@ public void setUp()
projectModelDAOControl = MockControl.createControl( ProjectModelDAO.class );
projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
}
public void testIfArtifactWasNotDeleted()
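
The diff is truncated before the test bodies. A test for the new cache behaviour could look roughly like the sketch below; it is not part of the commit, and the ArchivaArtifact constructor arguments, the "test-repo" repository id, and the Cache.put() seeding are assumptions made for illustration.

public void testCacheEntryEvictedWhenPomIsMissing()
    throws Exception
{
    // Assumed constructor order: groupId, artifactId, version, classifier, type.
    ArchivaArtifact artifact =
        new ArchivaArtifact( "org.apache.maven.archiva", "deleted-project", "1.0", "", "pom" );
    artifact.getModel().setRepositoryId( "test-repo" );

    ArchivaProjectModel projectModel = new ArchivaProjectModel();
    projectModel.setGroupId( "org.apache.maven.archiva" );
    projectModel.setArtifactId( "deleted-project" );
    projectModel.setVersion( "1.0" );

    // Seed the cache with the key the consumer is expected to evict.
    String projectKey = "org.apache.maven.archiva:deleted-project:1.0";
    effectiveProjectCache.put( projectKey, projectModel );

    // The DAO mock hands back the model for the missing pom and expects the delete call.
    projectModelDAOControl.expectAndReturn(
        projectModelDAOMock.getProjectModel( "org.apache.maven.archiva", "deleted-project", "1.0" ), projectModel );
    projectModelDAOMock.deleteProjectModel( projectModel );
    projectModelDAOControl.replay();

    dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );

    projectModelDAOControl.verify();
    assertFalse( effectiveProjectCache.hasKey( projectKey ) );
}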

View File

@ -62,4 +62,26 @@
</requirements>
</component>
<component>
<role>org.codehaus.plexus.cache.Cache</role>
<role-hint>effective-project-cache</role-hint>
<implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
<description>Effective Project Cache</description>
<configuration>
<disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
<disk-persistent>true</disk-persistent>
<disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
<eternal>true</eternal>
<max-elements-in-memory>1000</max-elements-in-memory>
<memory-eviction-policy>LRU</memory-eviction-policy>
<name>effective-project-cache</name>
<overflow-to-disk>false</overflow-to-disk>
<!-- TODO: Adjust the time to live to be more sane (ie: huge 4+ hours) -->
<!-- 45 minutes = 2700 seconds -->
<time-to-idle-seconds>2700</time-to-idle-seconds>
<!-- 30 minutes = 1800 seconds -->
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>
</components>
</component-set>
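
With this component declared, the cache can be resolved by role and role-hint in the same way the test above does in setUp(). The snippet below is a minimal sketch, not part of the commit, using the same lookup call shown in that test.

import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.cache.Cache;

public class EffectiveProjectCacheLookupTest
    extends PlexusTestCase
{
    // Resolves the "effective-project-cache" component declared in the XML above.
    public void testCacheComponentIsResolvable()
        throws Exception
    {
        Cache cache = (Cache) lookup( Cache.class, "effective-project-cache" );
        assertNotNull( cache );
    }
}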