[MRM-622]

-applied the part of archiva-database-consumers.patch (submitted by Dario Oliveros) that removes the project model from the effective cache
-delete the project model from the db if it already exists in ProjectModelToDatabaseConsumer


git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@637928 13f79535-47bb-0310-9956-ffa450edef68
Maria Odea B. Ching 2008-03-17 15:05:40 +00:00
parent 036b3e9937
commit 3985b652f5
4 changed files with 159 additions and 58 deletions
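
Both consumers touched below end with the same step once a project model is deleted from the database: build the groupId:artifactId:version key and evict it from the effective-project cache. A minimal sketch of that shared step, using only types and methods visible in the diff (the helper class and method names here are hypothetical, for illustration only):

import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.codehaus.plexus.cache.Cache;

public class EffectiveProjectCacheEviction
{
    // Same key format as the toProjectKey() helpers added below: groupId:artifactId:version
    static String toProjectKey( ArchivaProjectModel project )
    {
        return project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion();
    }

    // Guard the check-then-remove as a single step, as the added code does.
    static void evict( Cache effectiveProjectCache, ArchivaProjectModel project )
    {
        String projectKey = toProjectKey( project );
        synchronized ( effectiveProjectCache )
        {
            if ( effectiveProjectCache.hasKey( projectKey ) )
            {
                effectiveProjectCache.remove( projectKey );
            }
        }
    }
}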


@ -30,6 +30,7 @@ import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.codehaus.plexus.cache.Cache;
import java.util.List;
import java.io.File;
@ -70,6 +71,11 @@ public class DatabaseCleanupRemoveProjectConsumer
*/
private RepositoryContentFactory repositoryFactory;
/**
* @plexus.requirement role-hint="effective-project-cache"
*/
private Cache effectiveProjectCache;
public void beginScan()
{
// TODO Auto-generated method stub
@ -103,16 +109,26 @@ public class DatabaseCleanupRemoveProjectConsumer
if ( !file.exists() )
{
- ArchivaProjectModel projectModel = projectModelDAO.getProjectModel(
- artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() );
+ ArchivaProjectModel projectModel =
+ projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
+ artifact.getVersion() );
projectModelDAO.deleteProjectModel( projectModel );
// Force removal of project model from effective cache
String projectKey = toProjectKey( projectModel );
synchronized ( effectiveProjectCache )
{
if ( effectiveProjectCache.hasKey( projectKey ) )
{
effectiveProjectCache.remove( projectKey );
}
}
}
}
catch ( RepositoryException re )
{
- throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
- re.getMessage() );
+ throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " + re.getMessage() );
}
catch ( ArchivaDatabaseException e )
{
@ -146,4 +162,19 @@ public class DatabaseCleanupRemoveProjectConsumer
this.repositoryFactory = repositoryFactory;
}
public void setEffectiveProjectCache( Cache effectiveProjectCache )
{
this.effectiveProjectCache = effectiveProjectCache;
}
private String toProjectKey( ArchivaProjectModel project )
{
StringBuilder key = new StringBuilder();
key.append( project.getGroupId() ).append( ":" );
key.append( project.getArtifactId() ).append( ":" );
key.append( project.getVersion() );
return key.toString();
}
}


@ -44,6 +44,7 @@ import org.apache.maven.archiva.repository.project.ProjectModelException;
import org.apache.maven.archiva.repository.project.ProjectModelFilter;
import org.apache.maven.archiva.repository.project.ProjectModelReader;
import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
import org.codehaus.plexus.cache.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -105,6 +106,11 @@ public class ProjectModelToDatabaseConsumer
private List<String> includes;
/**
* @plexus.requirement role-hint="effective-project-cache"
*/
private Cache effectiveProjectCache;
public ProjectModelToDatabaseConsumer()
{
includes = new ArrayList<String>();
@ -135,10 +141,14 @@ public class ProjectModelToDatabaseConsumer
return;
}
- if ( hasProjectModelInDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) )
+ ArchivaProjectModel model = null;
+ // remove old project model if it already exists in the database
+ if ( ( model =
+ getProjectModelFromDatabase( artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion() ) ) != null )
{
- // Already in the database. Skip it.
- return;
+ removeOldProjectModel( model );
+ model = null;
}
ManagedRepositoryContent repo = getRepository( artifact );
@ -152,7 +162,7 @@ public class ProjectModelToDatabaseConsumer
try
{
- ArchivaProjectModel model = reader.read( artifactFile );
+ model = reader.read( artifactFile );
model.setOrigin( "filesystem" );
@ -175,8 +185,7 @@ public class ProjectModelToDatabaseConsumer
}
else
{
- log.warn(
- "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
+ log.warn( "Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
}
}
@ -188,31 +197,30 @@ public class ProjectModelToDatabaseConsumer
}
catch ( ArchivaDatabaseException e )
{
log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(),
e );
log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(), e );
}
catch ( Throwable t )
{
// Catch the other errors in the process to allow the rest of the process to complete.
log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() +
" : " + t.getMessage(), t );
log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() + " : " +
t.getMessage(), t );
}
}
- private boolean hasProjectModelInDatabase( String groupId, String artifactId, String version )
+ private ArchivaProjectModel getProjectModelFromDatabase( String groupId, String artifactId, String version )
{
try
{
ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
- return ( model != null );
+ return model;
}
catch ( ObjectNotFoundException e )
{
- return false;
+ return null;
}
catch ( ArchivaDatabaseException e )
{
- return false;
+ return null;
}
}
@ -320,4 +328,37 @@ public class ProjectModelToDatabaseConsumer
}
}
private String toProjectKey( ArchivaProjectModel project )
{
StringBuilder key = new StringBuilder();
key.append( project.getGroupId() ).append( ":" );
key.append( project.getArtifactId() ).append( ":" );
key.append( project.getVersion() );
return key.toString();
}
private void removeOldProjectModel( ArchivaProjectModel model )
{
try
{
dao.getProjectModelDAO().deleteProjectModel( model );
}
catch ( ArchivaDatabaseException ae )
{
log.error( "Unable to delete existing project model." );
}
// Force removal of project model from effective cache
String projectKey = toProjectKey( model );
synchronized ( effectiveProjectCache )
{
if ( effectiveProjectCache.hasKey( projectKey ) )
{
effectiveProjectCache.remove( projectKey );
}
}
}
}


@ -19,6 +19,7 @@ package org.apache.maven.archiva.consumers.database;
* under the License.
*/
import org.codehaus.plexus.cache.Cache;
import org.easymock.MockControl;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
@ -38,6 +39,8 @@ public class DatabaseCleanupRemoveProjectConsumerTest
private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;
private Cache effectiveProjectCache;
public void setUp()
throws Exception
{
@ -49,9 +52,13 @@ public class DatabaseCleanupRemoveProjectConsumerTest
projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();
effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
}
public void testIfArtifactWasNotDeleted()


@ -62,4 +62,26 @@
</requirements>
</component>
</components>
<component>
<role>org.codehaus.plexus.cache.Cache</role>
<role-hint>effective-project-cache</role-hint>
<implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
<description>Effective Project Cache</description>
<configuration>
<disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
<disk-persistent>true</disk-persistent>
<disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
<eternal>true</eternal>
<max-elements-in-memory>1000</max-elements-in-memory>
<memory-eviction-policy>LRU</memory-eviction-policy>
<name>effective-project-cache</name>
<overflow-to-disk>false</overflow-to-disk>
<!-- TODO: Adjust the time to live to be more sane (ie: huge 4+ hours) -->
<!-- 45 minutes = 2700 seconds -->
<time-to-idle-seconds>2700</time-to-idle-seconds>
<!-- 30 minutes = 1800 seconds -->
<time-to-live-seconds>1800</time-to-live-seconds>
</configuration>
</component>
</component-set>
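
The component definition above supplies the Cache that both consumers declare with @plexus.requirement role-hint="effective-project-cache", and it is the same component the updated test resolves in setUp(). A minimal wiring sketch, assuming a Plexus test container that provides lookup() as the test above does (variable names are illustrative):

// Resolve the cache declared above and hand it to the consumer under test,
// mirroring the calls in DatabaseCleanupRemoveProjectConsumerTest.setUp().
Cache effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );
dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );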