[MRM-622]

- Applied the part of the archiva-database-consumers.patch submitted by Dario Oliveros that removes the project model from the effective cache.
- Delete the project model from the database if it already exists in ProjectModelToDatabaseConsumer.


git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@637928 13f79535-47bb-0310-9956-ffa450edef68
Author: Maria Odea B. Ching
Date:   2008-03-17 15:05:40 +00:00
Parent: 036b3e9937
Commit: 3985b652f5

4 changed files with 159 additions and 58 deletions
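
For orientation before the diffs: both consumers below apply the same eviction pattern, namely build a groupId:artifactId:version key for the project model and drop it from the plexus effective-project cache while holding the cache's lock. A minimal standalone sketch of that pattern; the EffectiveProjectCacheEvictor class is illustrative only and not part of this commit, and only the org.codehaus.plexus.cache.Cache calls that the diffs themselves use (hasKey, remove) are assumed:

import org.codehaus.plexus.cache.Cache;

// Illustrative helper, not part of this commit.
public class EffectiveProjectCacheEvictor
{
    // Key convention used by the consumers: groupId:artifactId:version
    private String toProjectKey( String groupId, String artifactId, String version )
    {
        return groupId + ":" + artifactId + ":" + version;
    }

    // Evict a stale effective model so the next resolution re-reads the pom.
    public void evict( Cache effectiveProjectCache, String groupId, String artifactId, String version )
    {
        String projectKey = toProjectKey( groupId, artifactId, version );
        synchronized ( effectiveProjectCache )
        {
            if ( effectiveProjectCache.hasKey( projectKey ) )
            {
                effectiveProjectCache.remove( projectKey );
            }
        }
    }
}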

View File: DatabaseCleanupRemoveProjectConsumer.java

@@ -30,6 +30,7 @@ import org.apache.maven.archiva.repository.RepositoryContentFactory;
 import org.apache.maven.archiva.repository.RepositoryException;
 import org.apache.maven.archiva.database.ProjectModelDAO;
 import org.apache.maven.archiva.database.ArchivaDatabaseException;
+import org.codehaus.plexus.cache.Cache;
 
 import java.util.List;
 import java.io.File;
@@ -64,12 +65,17 @@ public class DatabaseCleanupRemoveProjectConsumer
      * @plexus.requirement role-hint="jdo"
      */
     private ProjectModelDAO projectModelDAO;
 
     /**
      * @plexus.requirement
      */
     private RepositoryContentFactory repositoryFactory;
 
+    /**
+     * @plexus.requirement role-hint="effective-project-cache"
+     */
+    private Cache effectiveProjectCache;
+
     public void beginScan()
     {
         // TODO Auto-generated method stub
@@ -81,44 +87,54 @@ public class DatabaseCleanupRemoveProjectConsumer
     }
 
     public List<String> getIncludedTypes()
     {
         return null;
     }
 
     public void processArchivaArtifact( ArchivaArtifact artifact )
         throws ConsumerException
     {
         if ( !StringUtils.equals( "pom", artifact.getType() ) )
         {
             // Not a pom. Skip it.
             return;
         }
 
-        try
-        {
-            ManagedRepositoryContent repositoryContent =
-                repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
-
-            File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
-
-            if( !file.exists() )
-            {
-                ArchivaProjectModel projectModel = projectModelDAO.getProjectModel(
-                    artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
-
-                projectModelDAO.deleteProjectModel( projectModel );
-            }
-        }
-        catch ( RepositoryException re )
-        {
-            throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
-                re.getMessage() );
-        }
-        catch ( ArchivaDatabaseException e )
-        {
+        try
+        {
+            ManagedRepositoryContent repositoryContent =
+                repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );
+
+            File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );
+
+            if ( !file.exists() )
+            {
+                ArchivaProjectModel projectModel =
+                    projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
+                                                     artifact.getVersion() );
+
+                projectModelDAO.deleteProjectModel( projectModel );
+
+                // Force removal of project model from effective cache
+                String projectKey = toProjectKey( projectModel );
+                synchronized ( effectiveProjectCache )
+                {
+                    if ( effectiveProjectCache.hasKey( projectKey ) )
+                    {
+                        effectiveProjectCache.remove( projectKey );
+                    }
+                }
+            }
+        }
+        catch ( RepositoryException re )
+        {
+            throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " + re.getMessage() );
+        }
+        catch ( ArchivaDatabaseException e )
+        {
             throw new ConsumerException( e.getMessage() );
         }
     }
 
     public String getDescription()
@@ -134,16 +150,31 @@ public class DatabaseCleanupRemoveProjectConsumer
     public boolean isPermanent()
     {
         return false;
     }
 
     public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
     {
         this.projectModelDAO = projectModelDAO;
     }
 
     public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
     {
         this.repositoryFactory = repositoryFactory;
     }
 
+    public void setEffectiveProjectCache( Cache effectiveProjectCache )
+    {
+        this.effectiveProjectCache = effectiveProjectCache;
+    }
+
+    private String toProjectKey( ArchivaProjectModel project )
+    {
+        StringBuilder key = new StringBuilder();
+        key.append( project.getGroupId() ).append( ":" );
+        key.append( project.getArtifactId() ).append( ":" );
+        key.append( project.getVersion() );
+
+        return key.toString();
+    }
 }

View File: ProjectModelToDatabaseConsumer.java

@@ -44,6 +44,7 @@ import org.apache.maven.archiva.repository.project.ProjectModelException;
 import org.apache.maven.archiva.repository.project.ProjectModelFilter;
 import org.apache.maven.archiva.repository.project.ProjectModelReader;
 import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
+import org.codehaus.plexus.cache.Cache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -61,7 +62,7 @@ public class ProjectModelToDatabaseConsumer
     implements DatabaseUnprocessedArtifactConsumer
 {
     private Logger log = LoggerFactory.getLogger( ProjectModelToDatabaseConsumer.class );
 
     /**
      * @plexus.configuration default-value="update-db-project"
      */
@@ -105,6 +106,11 @@ public class ProjectModelToDatabaseConsumer
     private List<String> includes;
 
+    /**
+     * @plexus.requirement role-hint="effective-project-cache"
+     */
+    private Cache effectiveProjectCache;
+
     public ProjectModelToDatabaseConsumer()
     {
         includes = new ArrayList<String>();
@@ -134,11 +140,15 @@ public class ProjectModelToDatabaseConsumer
             // Not a pom. Skip it.
             return;
         }
 
-        if ( hasProjectModelInDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) )
-        {
-            // Already in the database. Skip it.
-            return;
-        }
+        ArchivaProjectModel model = null;
+
+        // remove old project model if it already exists in the database
+        if ( ( model =
+            getProjectModelFromDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) ) != null )
+        {
+            removeOldProjectModel( model );
+            model = null;
+        }
 
         ManagedRepositoryContent repo = getRepository( artifact );
@@ -152,10 +162,10 @@ public class ProjectModelToDatabaseConsumer
         try
         {
-            ArchivaProjectModel model = reader.read( artifactFile );
+            model = reader.read( artifactFile );
 
             model.setOrigin( "filesystem" );
 
             // The version should be updated to the artifact/filename version if it is a unique snapshot
             if ( VersionUtil.isUniqueSnapshot( artifact.getVersion() ) )
             {
@@ -170,13 +180,12 @@ public class ProjectModelToDatabaseConsumer
             if ( isValidModel( model, repo, artifact ) )
             {
                 log.debug( "Adding project model to database - " + Keys.toKey( model ) );
 
                 dao.getProjectModelDAO().saveProjectModel( model );
             }
             else
             {
-                log.warn(
-                    "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
+                log.warn( "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
             }
         }
@@ -188,31 +197,30 @@ public class ProjectModelToDatabaseConsumer
         }
         catch ( ArchivaDatabaseException e )
         {
-            log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(),
-                e );
+            log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(), e );
         }
         catch ( Throwable t )
         {
             // Catch the other errors in the process to allow the rest of the process to complete.
-            log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() +
-                " : " + t.getMessage(), t );
+            log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() + " : " +
+                t.getMessage(), t );
         }
     }
 
-    private boolean hasProjectModelInDatabase( String groupId, String artifactId, String version )
+    private ArchivaProjectModel getProjectModelFromDatabase( String groupId, String artifactId, String version )
     {
         try
         {
             ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
-            return ( model != null );
+            return model;
         }
         catch ( ObjectNotFoundException e )
         {
-            return false;
+            return null;
         }
         catch ( ArchivaDatabaseException e )
         {
-            return false;
+            return null;
         }
     }
@@ -259,8 +267,8 @@ public class ProjectModelToDatabaseConsumer
             appendModel( emsg, model );
             emsg.append( "]: The model artifactId [" ).append( model.getArtifactId() );
             emsg.append( "] does not match the artifactId portion of the filename: " ).append( artifact.getArtifactId() );
-            log.warn(emsg.toString() );
+            log.warn( emsg.toString() );
 
             addProblem( artifact, emsg.toString() );
 
             return false;
@@ -275,8 +283,8 @@ public class ProjectModelToDatabaseConsumer
             appendModel( emsg, model );
             emsg.append( "]; The model version [" ).append( model.getVersion() );
             emsg.append( "] does not match the version portion of the filename: " ).append( artifact.getVersion() );
-            log.warn(emsg.toString() );
+            log.warn( emsg.toString() );
 
             addProblem( artifact, emsg.toString() );
 
             return false;
@@ -297,7 +305,7 @@ public class ProjectModelToDatabaseConsumer
         throws ConsumerException
     {
         ManagedRepositoryContent repo = getRepository( artifact );
 
         RepositoryProblem problem = new RepositoryProblem();
         problem.setRepositoryId( artifact.getModel().getRepositoryId() );
         problem.setPath( repo.toPath( artifact ) );
@@ -320,4 +328,37 @@ public class ProjectModelToDatabaseConsumer
         }
     }
 
+    private String toProjectKey( ArchivaProjectModel project )
+    {
+        StringBuilder key = new StringBuilder();
+        key.append( project.getGroupId() ).append( ":" );
+        key.append( project.getArtifactId() ).append( ":" );
+        key.append( project.getVersion() );
+
+        return key.toString();
+    }
+
+    private void removeOldProjectModel( ArchivaProjectModel model )
+    {
+        try
+        {
+            dao.getProjectModelDAO().deleteProjectModel( model );
+        }
+        catch ( ArchivaDatabaseException ae )
+        {
+            log.error( "Unable to delete existing project model." );
+        }
+
+        // Force removal of project model from effective cache
+        String projectKey = toProjectKey( model );
+        synchronized ( effectiveProjectCache )
+        {
+            if ( effectiveProjectCache.hasKey( projectKey ) )
+            {
+                effectiveProjectCache.remove( projectKey );
+            }
+        }
+    }
 }

View File: DatabaseCleanupRemoveProjectConsumerTest.java

@@ -19,6 +19,7 @@ package org.apache.maven.archiva.consumers.database;
  * under the License.
  */
 
+import org.codehaus.plexus.cache.Cache;
 import org.easymock.MockControl;
 import org.apache.maven.archiva.database.ProjectModelDAO;
 import org.apache.maven.archiva.model.ArchivaArtifact;
@@ -37,6 +38,8 @@ public class DatabaseCleanupRemoveProjectConsumerTest
     private ProjectModelDAO projectModelDAOMock;
 
     private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;
 
+    private Cache effectiveProjectCache;
+
     public void setUp()
         throws Exception
@@ -47,11 +50,15 @@ public class DatabaseCleanupRemoveProjectConsumerTest
         projectModelDAOControl = MockControl.createControl( ProjectModelDAO.class );
 
         projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
 
+        effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
+
         dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
         dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
+        dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
     }
 
     public void testIfArtifactWasNotDeleted()
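
The hunk above cuts off before the test bodies. A hedged sketch of how a cache-eviction check could be written in this test class; the createArtifact/createProjectModel helpers and the test repository wiring are hypothetical stand-ins and are not part of this commit, only the MockControl and Cache calls already shown above are assumed:

    public void testEffectiveCacheEntryIsEvictedWhenPomIsMissing()
        throws Exception
    {
        // Hypothetical fixture helpers: assume they point the artifact at the managed test
        // repository configured in setUp(), where no pom file exists on disk.
        ArchivaArtifact artifact = createArtifact( "org.apache.maven.archiva", "test-project", "1.0", "pom" );
        ArchivaProjectModel projectModel = createProjectModel( "org.apache.maven.archiva", "test-project", "1.0" );

        // Seed the effective cache so the consumer has an entry to evict.
        String key = "org.apache.maven.archiva:test-project:1.0";
        effectiveProjectCache.put( key, projectModel );

        // The DAO mock hands back the stored model and then expects the delete.
        projectModelDAOControl.expectAndReturn(
            projectModelDAOMock.getProjectModel( "org.apache.maven.archiva", "test-project", "1.0" ),
            projectModel );
        projectModelDAOMock.deleteProjectModel( projectModel );
        projectModelDAOControl.replay();

        // The pom is absent, so the consumer deletes the model and evicts the cache entry.
        dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );

        projectModelDAOControl.verify();
        assertFalse( effectiveProjectCache.hasKey( key ) );
    }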

View File: Plexus component descriptor (effective-project-cache definition)

@@ -62,4 +62,26 @@
       </requirements>
     </component>
   </components>
+  <component>
+    <role>org.codehaus.plexus.cache.Cache</role>
+    <role-hint>effective-project-cache</role-hint>
+    <implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
+    <description>Effective Project Cache</description>
+    <configuration>
+      <disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
+      <disk-persistent>true</disk-persistent>
+      <disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
+      <eternal>true</eternal>
+      <max-elements-in-memory>1000</max-elements-in-memory>
+      <memory-eviction-policy>LRU</memory-eviction-policy>
+      <name>effective-project-cache</name>
+      <overflow-to-disk>false</overflow-to-disk>
+      <!-- TODO: Adjust the time to live to be more sane (ie: huge 4+ hours) -->
+      <!-- 45 minutes = 2700 seconds -->
+      <time-to-idle-seconds>2700</time-to-idle-seconds>
+      <!-- 30 minutes = 1800 seconds -->
+      <time-to-live-seconds>1800</time-to-live-seconds>
+    </configuration>
+  </component>
 </component-set>