[MRM-1025] remove database cleanup consumers and centralise functionality in RepositoryListener

Note: this commit requires a follow-up work item to add a service that scans metadata and cleans up when a file is removed. The workflow will be documented first.


git-svn-id: https://svn.apache.org/repos/asf/archiva/branches/MRM-1025@723334 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Brett Porter 2008-12-04 14:13:52 +00:00
parent 4fe80f811a
commit 0507f8aa80
29 changed files with 160 additions and 1805 deletions

View File

@ -1032,6 +1032,7 @@
<name>cleanupConsumers</name>
<version>1.0.0+</version>
<required>true</required>
<deprecated>true</deprecated>
<association>
<type>String</type>
<multiplicity>*</multiplicity>

View File

@ -1,162 +0,0 @@
package org.apache.maven.archiva.consumers.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.Constraint;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import java.util.List;
import java.io.File;
/**
 * Consumer that purges artifacts from the database when their backing file has
 * disappeared from the managed repository on disk.
 *
 * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
 * @version $Id$
 *
 * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
 * role-hint="not-present-remove-db-artifact"
 * instantiation-strategy="per-lookup"
 */
public class DatabaseCleanupRemoveArtifactConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    /**
     * @plexus.configuration default-value="not-present-remove-db-artifact"
     */
    private String id;

    /**
     * @plexus.configuration default-value="Remove artifact from database if not present on filesystem."
     */
    private String description;

    /**
     * @plexus.requirement role-hint="jdo"
     */
    private ArtifactDAO artifactDAO;

    /**
     * @plexus.requirement role-hint="jdo"
     */
    private RepositoryProblemDAO repositoryProblemDAO;

    /**
     * @plexus.requirement
     */
    private RepositoryContentFactory repositoryFactory;

    public void beginScan()
    {
        // No per-scan state to initialise.
    }

    public void completeScan()
    {
        // No per-scan state to tear down.
    }

    public List<String> getIncludedTypes()
    {
        // null means no type filtering: every artifact type is processed.
        return null;
    }

    /**
     * Deletes the artifact record (and any repository problems recorded against it)
     * when the corresponding file no longer exists in the repository.
     *
     * @param artifact the database artifact to check against the filesystem
     * @throws ConsumerException if the repository content cannot be resolved or a
     *                           database operation fails
     */
    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        try
        {
            ManagedRepositoryContent repoContent =
                repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );

            File artifactFile = new File( repoContent.getRepoRoot(), repoContent.toPath( artifact ) );

            if ( artifactFile.exists() )
            {
                // Still present on disk - nothing to clean up.
                return;
            }

            artifactDAO.deleteArtifact( artifact );

            // Also purge every repository problem that referenced the deleted artifact.
            Constraint byArtifact = new RepositoryProblemByArtifactConstraint( artifact );
            List<RepositoryProblem> problems = repositoryProblemDAO.queryRepositoryProblems( byArtifact );
            if ( problems != null )
            {
                for ( RepositoryProblem problem : problems )
                {
                    repositoryProblemDAO.deleteRepositoryProblem( problem );
                }
            }
        }
        catch ( RepositoryException re )
        {
            throw new ConsumerException( "Can't run database cleanup remove artifact consumer: " +
                re.getMessage() );
        }
        catch ( ArchivaDatabaseException e )
        {
            throw new ConsumerException( e.getMessage() );
        }
    }

    public String getDescription()
    {
        return description;
    }

    public String getId()
    {
        return id;
    }

    public boolean isPermanent()
    {
        return false;
    }

    public void setArtifactDAO( ArtifactDAO artifactDAO )
    {
        this.artifactDAO = artifactDAO;
    }

    public void setRepositoryProblemDAO( RepositoryProblemDAO repositoryProblemDAO )
    {
        this.repositoryProblemDAO = repositoryProblemDAO;
    }

    public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
    {
        this.repositoryFactory = repositoryFactory;
    }
}

View File

@ -1,179 +0,0 @@
package org.apache.maven.archiva.consumers.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.codehaus.plexus.cache.Cache;
import java.util.List;
import java.io.File;
/**
 * Consumer for removing or deleting from the database the project models of artifacts that have been
 * deleted/removed from the repository.
 *
 * @author <a href="mailto:oching@apache.org">Maria Odea Ching</a>
 * @version $Id$
 *
 * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
 * role-hint="not-present-remove-db-project"
 * instantiation-strategy="per-lookup"
 */
public class DatabaseCleanupRemoveProjectConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    /**
     * @plexus.configuration default-value="not-present-remove-db-project"
     */
    private String id;

    /**
     * @plexus.configuration default-value="Remove project from database if not present on filesystem."
     */
    private String description;

    /**
     * @plexus.requirement role-hint="jdo"
     */
    private ProjectModelDAO projectModelDAO;

    /**
     * @plexus.requirement
     */
    private RepositoryContentFactory repositoryFactory;

    /**
     * @plexus.requirement role-hint="effective-project-cache"
     */
    private Cache effectiveProjectCache;

    public void beginScan()
    {
        // No per-scan state to initialise.
    }

    public void completeScan()
    {
        // No per-scan state to tear down.
    }

    public List<String> getIncludedTypes()
    {
        // null means no type filtering; pom filtering is done in processArchivaArtifact.
        return null;
    }

    /**
     * Deletes the project model for a pom artifact whose file no longer exists in the
     * repository, and evicts its entry from the effective-project cache.
     *
     * @param artifact the database artifact to check; non-pom artifacts are skipped
     * @throws ConsumerException if the repository content cannot be resolved or a
     *                           database operation fails
     */
    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        if ( !StringUtils.equals( "pom", artifact.getType() ) )
        {
            // Not a pom. Skip it.
            return;
        }

        try
        {
            ManagedRepositoryContent repositoryContent =
                repositoryFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );

            File file = new File( repositoryContent.getRepoRoot(), repositoryContent.toPath( artifact ) );

            if ( !file.exists() )
            {
                ArchivaProjectModel projectModel =
                    projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
                                                     artifact.getVersion() );

                // Guard against a model that was already removed (or never stored):
                // deleting or keying a null model would fail with an NPE.
                if ( projectModel != null )
                {
                    projectModelDAO.deleteProjectModel( projectModel );

                    // Force removal of project model from effective cache
                    String projectKey = toProjectKey( projectModel );
                    synchronized ( effectiveProjectCache )
                    {
                        if ( effectiveProjectCache.hasKey( projectKey ) )
                        {
                            effectiveProjectCache.remove( projectKey );
                        }
                    }
                }
            }
        }
        catch ( RepositoryException re )
        {
            // Fixed copy-paste error: this is the *project* cleanup consumer.
            throw new ConsumerException( "Can't run database cleanup remove project consumer: " + re.getMessage() );
        }
        catch ( ArchivaDatabaseException e )
        {
            throw new ConsumerException( e.getMessage() );
        }
    }

    public String getDescription()
    {
        return description;
    }

    public String getId()
    {
        return id;
    }

    public boolean isPermanent()
    {
        return false;
    }

    public void setProjectModelDAO( ProjectModelDAO projectModelDAO )
    {
        this.projectModelDAO = projectModelDAO;
    }

    public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
    {
        this.repositoryFactory = repositoryFactory;
    }

    public void setEffectiveProjectCache( Cache effectiveProjectCache )
    {
        this.effectiveProjectCache = effectiveProjectCache;
    }

    /**
     * Builds the cache key for a project model: groupId:artifactId:version.
     */
    private String toProjectKey( ArchivaProjectModel project )
    {
        StringBuilder key = new StringBuilder();
        key.append( project.getGroupId() ).append( ":" );
        key.append( project.getArtifactId() ).append( ":" );
        key.append( project.getVersion() );
        return key.toString();
    }
}

View File

@ -1,96 +0,0 @@
package org.apache.maven.archiva.consumers.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.easymock.MockControl;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
/**
 * Test for DatabaseCleanupRemoveArtifactConsumerTest
 *
 */
public class DatabaseCleanupRemoveArtifactConsumerTest
    extends AbstractDatabaseCleanupTest
{
    private MockControl artifactDAOControl;

    private ArtifactDAO artifactDAOMock;

    private MockControl repositoryProblemDAOControl;

    private RepositoryProblemDAO repositoryProblemDAOMock;

    private DatabaseCleanupRemoveArtifactConsumer dbCleanupRemoveArtifactConsumer;

    public void setUp()
        throws Exception
    {
        super.setUp();

        // Wire the consumer under test against mocked DAOs and the real repository factory.
        artifactDAOControl = MockControl.createControl( ArtifactDAO.class );
        artifactDAOMock = (ArtifactDAO) artifactDAOControl.getMock();

        repositoryProblemDAOControl = MockControl.createControl( RepositoryProblemDAO.class );
        repositoryProblemDAOMock = (RepositoryProblemDAO) repositoryProblemDAOControl.getMock();

        dbCleanupRemoveArtifactConsumer = new DatabaseCleanupRemoveArtifactConsumer();
        dbCleanupRemoveArtifactConsumer.setArtifactDAO( artifactDAOMock );
        dbCleanupRemoveArtifactConsumer.setRepositoryProblemDAO( repositoryProblemDAOMock );
        dbCleanupRemoveArtifactConsumer.setRepositoryFactory( repositoryFactory );
    }

    public void testIfArtifactWasNotDeleted()
        throws Exception
    {
        // The file exists on disk, so no DAO interaction is expected at all.
        ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "jar" );

        artifactDAOControl.replay();
        repositoryProblemDAOControl.replay();

        dbCleanupRemoveArtifactConsumer.processArchivaArtifact( artifact );

        artifactDAOControl.verify();
        repositoryProblemDAOControl.verify();
    }

    public void testIfArtifactWasDeleted()
        throws Exception
    {
        // The file is missing, so the consumer must ask the DAO to delete the record.
        ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION, "jar" );

        artifactDAOMock.deleteArtifact( artifact );
        artifactDAOControl.replay();

        dbCleanupRemoveArtifactConsumer.processArchivaArtifact( artifact );

        artifactDAOControl.verify();
    }
}

View File

@ -1,114 +0,0 @@
package org.apache.maven.archiva.consumers.database;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.codehaus.plexus.cache.Cache;
import org.easymock.MockControl;
import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel;
/**
 * Test for DatabaseCleanupRemoveProjectConsumer
 *
 */
public class DatabaseCleanupRemoveProjectConsumerTest
    extends AbstractDatabaseCleanupTest
{
    private MockControl projectModelDAOControl;

    private ProjectModelDAO projectModelDAOMock;

    private DatabaseCleanupRemoveProjectConsumer dbCleanupRemoveProjectConsumer;

    private Cache effectiveProjectCache;

    public void setUp()
        throws Exception
    {
        super.setUp();

        // Wire the consumer under test against a mocked DAO and a real cache component.
        projectModelDAOControl = MockControl.createControl( ProjectModelDAO.class );
        projectModelDAOMock = (ProjectModelDAO) projectModelDAOControl.getMock();

        effectiveProjectCache = (Cache) lookup( Cache.class, "effective-project-cache" );

        dbCleanupRemoveProjectConsumer = new DatabaseCleanupRemoveProjectConsumer();
        dbCleanupRemoveProjectConsumer.setProjectModelDAO( projectModelDAOMock );
        dbCleanupRemoveProjectConsumer.setRepositoryFactory( repositoryFactory );
        dbCleanupRemoveProjectConsumer.setEffectiveProjectCache( effectiveProjectCache );
    }

    public void testIfArtifactWasNotDeleted()
        throws Exception
    {
        // The pom still exists on disk, so the DAO must not be touched.
        ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "pom" );

        projectModelDAOControl.replay();

        dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );

        projectModelDAOControl.verify();
    }

    public void testIfArtifactWasDeleted()
        throws Exception
    {
        // The pom is missing: expect a lookup followed by a delete of the model.
        ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION, "pom" );
        ArchivaProjectModel projectModel = createProjectModel( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION );

        //this should return a value
        projectModelDAOControl.expectAndReturn(
            projectModelDAOMock.getProjectModel( TEST_GROUP_ID, TEST_ARTIFACT_ID, TEST_VERSION ),
            (ArchivaProjectModel) projectModel );

        projectModelDAOMock.deleteProjectModel( projectModel );
        projectModelDAOControl.replay();

        dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );

        projectModelDAOControl.verify();
    }

    public void testIfArtifactWasNotAPom()
        throws Exception
    {
        // Non-pom artifacts are skipped outright: no DAO interaction expected.
        ArchivaArtifact artifact = createArtifact( TEST_GROUP_ID, "do-not-cleanup-artifact-test", TEST_VERSION, "jar" );

        projectModelDAOControl.replay();

        dbCleanupRemoveProjectConsumer.processArchivaArtifact( artifact );

        projectModelDAOControl.verify();
    }
}

View File

@ -1,65 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<!-- Plexus test wiring for the database-cleanup consumer tests. -->
<component-set>
<components>
<!-- Archiva configuration dedicated to these tests, backed by the
"database-cleanup" registry defined below. -->
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>database-cleanup</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>database-cleanup</role-hint>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>prePolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postPolicies</field-name>
</requirement>
</requirements>
</component>
<!-- Registry that reads/creates a throwaway configuration file under target/. -->
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>database-cleanup</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<xml fileName="${basedir}/target/test/repository-manager.xml" config-optional="true" config-forceCreate="true"
config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
<!-- Repository content factory wired to the test configuration above. -->
<component>
<role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
<implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>database-cleanup</role-hint>
</requirement>
</requirements>
</component>
</components>
</component-set>

View File

@ -1,87 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one
  ~ or more contributor license agreements. See the NOTICE file
  ~ distributed with this work for additional information
  ~ regarding copyright ownership. The ASF licenses this file
  ~ to you under the Apache License, Version 2.0 (the
  ~ "License"); you may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at
  ~
  ~ http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing,
  ~ software distributed under the License is distributed on an
  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  ~ KIND, either express or implied. See the License for the
  ~ specific language governing permissions and limitations
  ~ under the License.
-->
<!-- Plexus test wiring for the database-cleanup consumer tests. -->
<component-set>
  <components>
    <!-- Archiva configuration dedicated to these tests, backed by the
         "database-cleanup" registry defined below. -->
    <component>
      <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
      <role-hint>database-cleanup</role-hint>
      <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
      <requirements>
        <requirement>
          <role>org.codehaus.plexus.registry.Registry</role>
          <role-hint>database-cleanup</role-hint>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
          <field-name>prePolicies</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
          <field-name>postPolicies</field-name>
        </requirement>
      </requirements>
    </component>
    <!-- Registry that reads/creates a throwaway configuration file under target/. -->
    <component>
      <role>org.codehaus.plexus.registry.Registry</role>
      <role-hint>database-cleanup</role-hint>
      <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
      <configuration>
        <properties>
          <xml fileName="${basedir}/target/test/repository-manager.xml" config-optional="true" config-forceCreate="true"
               config-name="org.apache.maven.archiva.base" config-at="org.apache.maven.archiva"/>
        </properties>
      </configuration>
    </component>
    <!-- Repository content factory wired to the test configuration above. -->
    <component>
      <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
      <implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
      <requirements>
        <requirement>
          <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
          <role-hint>database-cleanup</role-hint>
        </requirement>
      </requirements>
    </component>
    <!-- FIX: this component was previously declared outside the <components>
         element, directly under <component-set>, where Plexus does not pick it
         up. Moved inside <components> so the effective-project-cache lookup in
         the tests resolves. -->
    <component>
      <role>org.codehaus.plexus.cache.Cache</role>
      <role-hint>effective-project-cache</role-hint>
      <implementation>org.codehaus.plexus.cache.ehcache.EhcacheCache</implementation>
      <description>Effective Project Cache</description>
      <configuration>
        <disk-expiry-thread-interval-seconds>600</disk-expiry-thread-interval-seconds>
        <disk-persistent>true</disk-persistent>
        <disk-store-path>${java.io.tmpdir}/archiva/effectiveproject</disk-store-path>
        <eternal>true</eternal>
        <max-elements-in-memory>1000</max-elements-in-memory>
        <memory-eviction-policy>LRU</memory-eviction-policy>
        <name>effective-project-cache</name>
        <overflow-to-disk>false</overflow-to-disk>
        <!-- TODO: Adjust the time to live to be more sane (ie: huge 4+ hours) -->
        <!-- 45 minutes = 2700 seconds -->
        <time-to-idle-seconds>2700</time-to-idle-seconds>
        <!-- 30 minutes = 1800 seconds -->
        <time-to-live-seconds>1800</time-to-live-seconds>
      </configuration>
    </component>
  </components>
</component-set>

View File

@ -1,152 +0,0 @@
package org.apache.maven.archiva.consumers.lucene;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeRecord;
import org.apache.maven.archiva.indexer.filecontent.FileContentRecord;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesRecord;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import java.io.File;
import java.util.List;
/**
 * LuceneCleanupRemoveIndexedConsumer - removes the Lucene index records
 * (file-content, hashcodes, bytecode) for an artifact whose file is no longer
 * present in the managed repository.
 *
 * @version $Id$
 * @plexus.component role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
 * role-hint="not-present-remove-indexed" instantiation-strategy="per-lookup"
 */
public class LuceneCleanupRemoveIndexedConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    /**
     * @plexus.configuration default-value="not-present-remove-indexed"
     */
    private String id;

    /**
     * @plexus.configuration default-value="Remove indexed content if not present on filesystem."
     */
    private String description;

    /**
     * @plexus.requirement role-hint="lucene"
     */
    private RepositoryContentIndexFactory repoIndexFactory;

    /**
     * @plexus.requirement
     */
    private RepositoryContentFactory repoFactory;

    public void beginScan()
    {
        // No per-scan state to initialise.
    }

    public void completeScan()
    {
        // No per-scan state to tear down.
    }

    public List<String> getIncludedTypes()
    {
        // null means no type filtering: every artifact type is processed.
        return null;
    }

    /**
     * Deletes the file-content, hashcodes and bytecode index records for the
     * given artifact when its backing file is missing from the repository.
     *
     * @param artifact the database artifact to check against the filesystem
     * @throws ConsumerException if the repository content cannot be resolved or
     *                           an index operation fails
     */
    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        try
        {
            ManagedRepositoryContent content =
                repoFactory.getManagedRepositoryContent( artifact.getModel().getRepositoryId() );

            File artifactFile = new File( content.getRepoRoot(), content.toPath( artifact ) );

            if ( artifactFile.exists() )
            {
                // Still present on disk - keep the index records.
                return;
            }

            // Remove the artifact from each of the three content indexes.
            FileContentRecord fileContentRecord = new FileContentRecord();
            fileContentRecord.setFilename( content.toPath( artifact ) );
            RepositoryContentIndex fileContentIndex =
                repoIndexFactory.createFileContentIndex( content.getRepository() );
            fileContentIndex.deleteRecord( fileContentRecord );

            HashcodesRecord hashcodesRecord = new HashcodesRecord();
            hashcodesRecord.setArtifact( artifact );
            RepositoryContentIndex hashcodesIndex = repoIndexFactory.createHashcodeIndex( content.getRepository() );
            hashcodesIndex.deleteRecord( hashcodesRecord );

            BytecodeRecord bytecodeRecord = new BytecodeRecord();
            bytecodeRecord.setArtifact( artifact );
            RepositoryContentIndex bytecodeIndex = repoIndexFactory.createBytecodeIndex( content.getRepository() );
            bytecodeIndex.deleteRecord( bytecodeRecord );
        }
        catch ( RepositoryException e )
        {
            throw new ConsumerException( "Can't run index cleanup consumer: " + e.getMessage() );
        }
        catch ( RepositoryIndexException e )
        {
            throw new ConsumerException( e.getMessage() );
        }
    }

    public String getDescription()
    {
        return description;
    }

    public String getId()
    {
        return id;
    }

    public boolean isPermanent()
    {
        return false;
    }

    public void setRepositoryIndexFactory( RepositoryContentIndexFactory repoIndexFactory )
    {
        this.repoIndexFactory = repoIndexFactory;
    }

    public void setRepositoryContentFactory( RepositoryContentFactory repoFactory )
    {
        this.repoFactory = repoFactory;
    }
}

View File

@ -1,76 +0,0 @@
package org.apache.maven.archiva.consumers.lucene;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
/**
 * LuceneCleanupRemoveIndexedConsumerTest
 *
 * @version
 */
public class LuceneCleanupRemoveIndexedConsumerTest
    extends PlexusInSpringTestCase
{
    private DatabaseCleanupConsumer luceneCleanupRemoveIndexConsumer;

    public void setUp()
        throws Exception
    {
        super.setUp();

        // Resolve the consumer under test from the Plexus container.
        luceneCleanupRemoveIndexConsumer =
            (DatabaseCleanupConsumer) lookup( DatabaseCleanupConsumer.class, "lucene-cleanup" );
    }

    public void testIfArtifactExists()
        throws Exception
    {
        // File present on disk: processing must complete without touching the indexes.
        luceneCleanupRemoveIndexConsumer.processArchivaArtifact(
            createArtifact( "org.apache.maven.archiva", "archiva-lucene-cleanup", "1.0", "jar" ) );
    }

    public void testIfArtifactDoesNotExist()
        throws Exception
    {
        // File missing: processing must delete the corresponding index records.
        luceneCleanupRemoveIndexConsumer.processArchivaArtifact(
            createArtifact( "org.apache.maven.archiva", "deleted-artifact", "1.0", "jar" ) );
    }

    /**
     * Builds a minimal artifact in the "test-repo" repository for the cleanup tests.
     */
    private ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
    {
        ArchivaArtifactModel model = new ArchivaArtifactModel();
        model.setGroupId( groupId );
        model.setArtifactId( artifactId );
        model.setVersion( version );
        model.setType( type );
        model.setRepositoryId( "test-repo" );

        return new ArchivaArtifact( model );
    }
}

View File

@ -1,53 +0,0 @@
package org.apache.maven.archiva.consumers.lucene.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryContentIndexFactory;
/**
 * LuceneRepositoryContenIndexFactoryStub - test stub that hands out a fresh
 * stub index regardless of the index flavour requested.
 *
 * @version
 */
public class LuceneRepositoryContentIndexFactoryStub
    implements RepositoryContentIndexFactory
{
    public RepositoryContentIndex createBytecodeIndex( ManagedRepositoryConfiguration repository )
    {
        return newStubIndex();
    }

    public RepositoryContentIndex createFileContentIndex( ManagedRepositoryConfiguration repository )
    {
        return newStubIndex();
    }

    public RepositoryContentIndex createHashcodeIndex( ManagedRepositoryConfiguration repository )
    {
        return newStubIndex();
    }

    /** All three factory methods share the same stub implementation. */
    private RepositoryContentIndex newStubIndex()
    {
        return new LuceneRepositoryContentIndexStub();
    }
}

View File

@ -1,144 +0,0 @@
package org.apache.maven.archiva.consumers.lucene.stubs;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.util.Collection;
import junit.framework.Assert;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Searchable;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
/**
 * Test stub of {@link RepositoryContentIndex} for the lucene cleanup consumer tests.
 *
 * Query/lookup operations are unimplemented and return null or false; the delete
 * operations carry the test expectations: {@link #deleteRecords(Collection)} asserts
 * the batch size, and {@link #deleteRecord(LuceneRepositoryContentRecord)} fails the
 * test if the archiva-lucene-cleanup artifact (which must survive) is offered for
 * deletion.
 *
 * @version
 */
public class LuceneRepositoryContentIndexStub
    implements RepositoryContentIndex
{
    public void deleteRecords( Collection records )
        throws RepositoryIndexException
    {
        // The tests are expected to delete exactly two records per batch.
        Assert.assertEquals( 2, records.size() );
    }

    public boolean exists()
        throws RepositoryIndexException
    {
        // Not exercised by the tests.
        return false;
    }

    public Collection getAllRecordKeys()
        throws RepositoryIndexException
    {
        // Not exercised by the tests.
        return null;
    }

    public Analyzer getAnalyzer()
    {
        // Not exercised by the tests.
        return null;
    }

    public LuceneEntryConverter getEntryConverter()
    {
        // Not exercised by the tests.
        return null;
    }

    public String getId()
    {
        // Not exercised by the tests.
        return null;
    }

    public File getIndexDirectory()
    {
        // Not exercised by the tests.
        return null;
    }

    public QueryParser getQueryParser()
    {
        // Not exercised by the tests.
        return null;
    }

    public ManagedRepositoryConfiguration getRepository()
    {
        // Not exercised by the tests.
        return null;
    }

    public Searchable getSearchable()
        throws RepositoryIndexSearchException
    {
        // Not exercised by the tests.
        return null;
    }

    public void indexRecords( Collection records )
        throws RepositoryIndexException
    {
        // No-op: indexing is irrelevant to the cleanup tests.
    }

    public void modifyRecord( LuceneRepositoryContentRecord record )
        throws RepositoryIndexException
    {
        // No-op: modification is irrelevant to the cleanup tests.
    }

    public void modifyRecords( Collection records )
        throws RepositoryIndexException
    {
        // No-op: modification is irrelevant to the cleanup tests.
    }

    public void deleteRecord( LuceneRepositoryContentRecord record )
        throws RepositoryIndexException
    {
        Assert.assertNotNull( record );

        // fail since the record to be deleted should only be the deleted-artifact-1.0.jar
        // according to the tests
        //
        // BUG FIX: the original condition used &&, requiring the primary key to equal two
        // DIFFERENT strings at once — impossible, so Assert.fail() was unreachable. Either
        // key form (repository path or groupId:artifactId:version:type) identifies the
        // archiva-lucene-cleanup artifact that must NOT be deleted, hence ||.
        if ( record.getPrimaryKey().equals(
            "org/apache/maven/archiva/archiva-lucene-cleanup/1.0/archiva-lucene-cleanup-1.0.jar" ) ||
            record.getPrimaryKey().equals( "org.apache.maven.archiva:archiva-lucene-cleanup:1.0:jar" ) )
        {
            Assert.fail();
        }
    }
}

View File

@ -1,86 +0,0 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements.  See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership.  The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License.  You may obtain a copy of the License at
~
~   http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied.  See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->

<!--
  Plexus test wiring for the lucene-cleanup consumer tests: binds
  LuceneCleanupRemoveIndexedConsumer (role-hint "lucene-cleanup") to a stubbed
  RepositoryContentIndexFactory and to a DefaultArchivaConfiguration backed by
  src/test/conf/repository-manager.xml via a CommonsConfigurationRegistry.
-->
<component-set>
  <components>
    <!-- Consumer under test, wired against the stub index factory below. -->
    <component>
      <role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
      <role-hint>lucene-cleanup</role-hint>
      <implementation>org.apache.maven.archiva.consumers.lucene.LuceneCleanupRemoveIndexedConsumer</implementation>
      <requirements>
        <requirement>
          <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
          <role-hint>lucene-cleanup</role-hint>
          <field-name>repoIndexFactory</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
        </requirement>
      </requirements>
    </component>
    <component>
      <role>org.apache.maven.archiva.repository.RepositoryContentFactory</role>
      <implementation>org.apache.maven.archiva.repository.RepositoryContentFactory</implementation>
      <requirements>
        <requirement>
          <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
          <role-hint>lucene-cleanup</role-hint>
        </requirement>
      </requirements>
    </component>
    <!-- Configuration read from the test registry defined below. -->
    <component>
      <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
      <role-hint>lucene-cleanup</role-hint>
      <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
      <requirements>
        <requirement>
          <role>org.codehaus.plexus.registry.Registry</role>
          <role-hint>lucene-cleanup</role-hint>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
          <field-name>prePolicies</field-name>
        </requirement>
        <requirement>
          <role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
          <field-name>postPolicies</field-name>
        </requirement>
      </requirements>
    </component>
    <!-- Registry pointing at the test repository-manager.xml fixture. -->
    <component>
      <role>org.codehaus.plexus.registry.Registry</role>
      <role-hint>lucene-cleanup</role-hint>
      <implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
      <configuration>
        <properties>
          <xml fileName="${basedir}/src/test/conf/repository-manager.xml"
               config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
        </properties>
      </configuration>
    </component>
    <!-- Stub index factory that satisfies the consumer's repoIndexFactory requirement. -->
    <component>
      <role>org.apache.maven.archiva.indexer.RepositoryContentIndexFactory</role>
      <implementation>org.apache.maven.archiva.consumers.lucene.stubs.LuceneRepositoryContentIndexFactoryStub</implementation>
      <role-hint>lucene-cleanup</role-hint>
    </component>
  </components>
</component-set>

View File

@ -19,9 +19,15 @@ package org.apache.maven.archiva.database;
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.constraints.RepositoryProblemByArtifactConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.model.RepositoryProblem;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.events.RepositoryListener;
import org.codehaus.plexus.cache.Cache;
/**
* Process repository management events and respond appropriately.
@ -36,6 +42,21 @@ public class RepositoryDatabaseEventListener
*/
private ArtifactDAO artifactDAO;
/**
* @plexus.requirement role-hint="jdo"
*/
private RepositoryProblemDAO repositoryProblemDAO;
/**
* @plexus.requirement role-hint="jdo"
*/
private ProjectModelDAO projectModelDAO;
/**
* @plexus.requirement role-hint="effective-project-cache"
*/
private Cache effectiveProjectCache;
public void deleteArtifact( ManagedRepositoryContent repository, ArchivaArtifact artifact )
{
try
@ -50,6 +71,61 @@ public class RepositoryDatabaseEventListener
// ignored
}
// TODO [MRM-37]: re-run the database consumers to clean up
try
{
// Remove all repository problems related to this artifact
Constraint artifactConstraint = new RepositoryProblemByArtifactConstraint( artifact );
List<RepositoryProblem> repositoryProblems =
repositoryProblemDAO.queryRepositoryProblems( artifactConstraint );
if ( repositoryProblems != null )
{
for ( RepositoryProblem repositoryProblem : repositoryProblems )
{
repositoryProblemDAO.deleteRepositoryProblem( repositoryProblem );
}
}
}
catch ( ArchivaDatabaseException e )
{
// ignored
}
if ( "pom".equals( artifact.getType() ) )
{
try
{
ArchivaProjectModel projectModel =
projectModelDAO.getProjectModel( artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion() );
projectModelDAO.deleteProjectModel( projectModel );
// Force removal of project model from effective cache
String projectKey = toProjectKey( projectModel );
synchronized ( effectiveProjectCache )
{
if ( effectiveProjectCache.hasKey( projectKey ) )
{
effectiveProjectCache.remove( projectKey );
}
}
}
catch ( ArchivaDatabaseException e )
{
// ignored
}
}
}
/**
 * Builds the cache key for a project model: groupId:artifactId:version.
 */
private String toProjectKey( ArchivaProjectModel project )
{
    return project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion();
}
}

View File

@ -1,31 +0,0 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * DatabaseCleanupConsumer
 *
 * Marker interface for {@link ArchivaArtifactConsumer}s that remove database
 * content during a cleanup scan.  It declares no members of its own; it exists
 * only so cleanup consumers can be looked up and selected as a group.
 *
 * NOTE(review): per MRM-1025 this cleanup role is being centralised in
 * RepositoryListener; confirm remaining usages before extending this interface.
 *
 * @version $Id$
 */
public interface DatabaseCleanupConsumer
    extends ArchivaArtifactConsumer
{
}

View File

@ -52,13 +52,6 @@ public class DatabaseConsumers
*/
private List availableUnprocessedConsumers;
/**
* @plexus.requirement role="org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer"
*/
private List availableCleanupConsumers;
private Predicate selectedCleanupConsumers;
private Predicate selectedUnprocessedConsumers;
class SelectedUnprocessedConsumersPredicate
@ -80,31 +73,11 @@ public class DatabaseConsumers
}
}
class SelectedCleanupConsumersPredicate
implements Predicate
{
public boolean evaluate( Object object )
{
boolean satisfies = false;
if ( object instanceof DatabaseCleanupConsumer )
{
DatabaseCleanupConsumer consumer = (DatabaseCleanupConsumer) object;
DatabaseScanningConfiguration config = archivaConfiguration.getConfiguration().getDatabaseScanning();
return config.getCleanupConsumers().contains( consumer.getId() );
}
return satisfies;
}
}
public void initialize()
throws InitializationException
{
Predicate permanentConsumers = new PermanentConsumerPredicate();
selectedCleanupConsumers = new OrPredicate( permanentConsumers, new SelectedCleanupConsumersPredicate() );
selectedUnprocessedConsumers = new OrPredicate( permanentConsumers, new SelectedUnprocessedConsumersPredicate() );
}
@ -121,19 +94,6 @@ public class DatabaseConsumers
return ret;
}
/**
* Get the {@link List} of {@link DatabaseCleanupConsumer} objects for those
* consumers selected due to the configuration.
*
* @return the list of selected {@link DatabaseCleanupConsumer} objects.
*/
public List getSelectedCleanupConsumers()
{
List ret = new ArrayList();
ret.addAll( CollectionUtils.select( availableCleanupConsumers, selectedCleanupConsumers ) );
return ret;
}
/**
* Get the complete {@link List} of {@link DatabaseUnprocessedArtifactConsumer} objects
* that are available in the system, regardless of configuration.
@ -144,15 +104,4 @@ public class DatabaseConsumers
{
return Collections.unmodifiableList( this.availableUnprocessedConsumers );
}
/**
* Get the complete {@link List} of {@link DatabaseCleanupConsumer} objects
* that are available in the system, regardless of configuration.
*
* @return the list of all available {@link DatabaseCleanupConsumer} objects.
*/
public List getAvailableCleanupConsumers()
{
return Collections.unmodifiableList( this.availableCleanupConsumers );
}
}

View File

@ -29,15 +29,6 @@ import org.apache.maven.archiva.model.ArchivaArtifact;
*/
public interface DatabaseUpdater
{
/**
* Execute the {@link #updateAllUnprocessed()} and {@link #updateAllProcessed()}
* tasks in one go.
*
* @throws ArchivaDatabaseException
*/
public void update()
throws ArchivaDatabaseException;
/**
* Update all unprocessed content.
*
@ -53,26 +44,4 @@ public interface DatabaseUpdater
*/
public void updateUnprocessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException;
/**
* Update all previously processed content.
*
* This is done to allow archiva to remove content from the database that
* may have been removed from the filesystem too.
*
* @throws ArchivaDatabaseException if there was a fatal error with the database.
*/
public void updateAllProcessed()
throws ArchivaDatabaseException;
/**
* Update specific processed content.
*
* Example: This is done to allow a specific artifact to be removed from the
* database if it no longer exists on the filesystem.
*
* @throws ArchivaDatabaseException if there was a fatal error with the database.
*/
public void updateProcessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException;
}

View File

@ -26,7 +26,6 @@ import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.functors.NotPredicate;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
@ -60,13 +59,6 @@ public class JdoDatabaseUpdater
private ProcessArchivaArtifactClosure processArtifactClosure = new ProcessArchivaArtifactClosure();
public void update()
throws ArchivaDatabaseException
{
updateAllUnprocessed();
updateAllProcessed();
}
public void updateAllUnprocessed()
throws ArchivaDatabaseException
{
@ -92,31 +84,6 @@ public class JdoDatabaseUpdater
}
}
public void updateAllProcessed()
throws ArchivaDatabaseException
{
List processedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( true ) );
beginConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
try
{
// Process each consumer.
Predicate predicate = NotPredicate.getInstance( UnprocessedArtifactPredicate.getInstance() );
Iterator it = IteratorUtils.filteredIterator( processedArtifacts.iterator(), predicate );
while ( it.hasNext() )
{
ArchivaArtifact artifact = (ArchivaArtifact) it.next();
updateProcessed( artifact );
}
}
finally
{
endConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
}
}
private void endConsumerLifecycle( List consumers )
{
Iterator it = consumers.iterator();
@ -154,19 +121,4 @@ public class JdoDatabaseUpdater
artifact.getModel().setWhenProcessed( new Date() );
dao.getArtifactDAO().saveArtifact( artifact );
}
public void updateProcessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException
{
List consumers = dbConsumers.getSelectedCleanupConsumers();
if ( CollectionUtils.isEmpty( consumers ) )
{
log.warn( "There are no selected consumers for artifact cleanup." );
return;
}
this.processArtifactClosure.setArtifact( artifact );
CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
}
}

View File

@ -19,18 +19,6 @@ package org.apache.maven.archiva.database;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.jpox.SchemaTool;
import java.io.File;
import java.net.URL;
import java.text.SimpleDateFormat;
@ -42,6 +30,16 @@ import java.util.Properties;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.VersionedReference;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.jpox.SchemaTool;
/**
* AbstractArchivaDatabaseTestCase
*
@ -133,15 +131,6 @@ public abstract class AbstractArchivaDatabaseTestCase
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
}
protected TestDatabaseCleanupConsumer lookupTestCleanupConsumer()
throws Exception
{
TestDatabaseCleanupConsumer consumer = (TestDatabaseCleanupConsumer) lookup( DatabaseCleanupConsumer.class,
"test-db-cleanup" );
assertNotNull( "Test Database Cleanup Consumer should not be null.", consumer );
return consumer;
}
protected TestDatabaseUnprocessedConsumer lookupTestUnprocessedConsumer()
throws Exception
{

View File

@ -41,6 +41,7 @@ public class RepositoryDatabaseEventListenerTest
listener = (RepositoryListener) lookup( RepositoryListener.class.getName(), "database" );
}
@SuppressWarnings("unchecked")
public void testWiring()
{
List<RepositoryListener> listeners =
@ -51,10 +52,10 @@ public class RepositoryDatabaseEventListenerTest
assertEquals( listener, listeners.get( 0 ) );
}
public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao )
public ArchivaArtifact createArtifact( String artifactId, String version, ArtifactDAO artifactDao, String type )
{
ArchivaArtifact artifact =
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", "jar" );
artifactDao.createArtifact( "org.apache.maven.archiva.test", artifactId, version, "", type );
artifact.getModel().setLastModified( new Date() );
artifact.getModel().setRepositoryId( "testable_repo" );
return artifact;
@ -65,17 +66,18 @@ public class RepositoryDatabaseEventListenerTest
{
ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );
// Setup artifacts in fresh DB.
ArchivaArtifact artifact = createArtifact( "test-artifact", "1.0", artifactDao );
artifactDao.saveArtifact( artifact );
ArchivaArtifact pomArtifact = createPom( artifactDao );
ArchivaArtifact jarArtifact = createJar( artifactDao );
assertEquals( artifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null,
"jar" ) );
assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
null, "pom" ) );
assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
null, "jar" ) );
artifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
jarArtifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "jar" );
ManagedRepositoryContent repository =
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
listener.deleteArtifact( repository, artifact );
listener.deleteArtifact( repository, jarArtifact );
try
{
@ -86,5 +88,56 @@ public class RepositoryDatabaseEventListenerTest
{
assertTrue( true );
}
assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
null, "pom" ) );
}
/**
 * Persists a fresh jar-type test artifact and returns it for assertions.
 */
private ArchivaArtifact createJar( ArtifactDAO artifactDao )
    throws ArchivaDatabaseException
{
    ArchivaArtifact jar = createArtifact( "test-artifact", "1.0", artifactDao, "jar" );
    artifactDao.saveArtifact( jar );
    return jar;
}
/**
 * Verifies that deleting a pom artifact through the repository listener removes
 * only the pom from the database, leaving the sibling jar artifact intact.
 */
public void testDeletePomArtifact()
    throws Exception
{
    ArtifactDAO artifactDao = (ArtifactDAO) lookup( ArtifactDAO.class.getName(), "jdo" );

    // Seed the fresh DB with both a pom and a jar for the same GAV.
    ArchivaArtifact pomArtifact = createPom( artifactDao );
    ArchivaArtifact jarArtifact = createJar( artifactDao );

    // Sanity check: both artifacts are retrievable before deletion.
    assertEquals( pomArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
                                                        null, "pom" ) );
    assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
                                                        null, "jar" ) );

    // Use a detached reference (not the persisted instance) to drive the deletion.
    pomArtifact = new ArchivaArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "pom" );
    ManagedRepositoryContent repository =
        (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class.getName(), "default" );
    listener.deleteArtifact( repository, pomArtifact );

    // The pom must now be gone from the database...
    try
    {
        artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0", null, "pom" );
        fail( "Should not find artifact" );
    }
    catch ( ObjectNotFoundException e )
    {
        assertTrue( true );
    }

    // ...while the jar for the same GAV is untouched.
    assertEquals( jarArtifact, artifactDao.getArtifact( "org.apache.maven.archiva.test", "test-artifact", "1.0",
                                                        null, "jar" ) );
}
/**
 * Persists a fresh pom-type test artifact and returns it for assertions.
 */
private ArchivaArtifact createPom( ArtifactDAO artifactDao )
    throws ArchivaDatabaseException
{
    ArchivaArtifact pom = createArtifact( "test-artifact", "1.0", artifactDao, "pom" );
    artifactDao.saveArtifact( pom );
    return pom;
}
}

View File

@ -40,16 +40,6 @@ public class DatabaseConsumersTest
return dbconsumers;
}
public void testGetAvailableCleanupConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getAvailableCleanupConsumers();
assertNotNull( "Available Cleanup Consumers should never be null.", available );
assertTrue( "Available Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetAvailableUnprocessedConsumers()
throws Exception
{
@ -60,16 +50,6 @@ public class DatabaseConsumersTest
assertTrue( "Available Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetSelectedCleanupConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getSelectedCleanupConsumers();
assertNotNull( "Selected Cleanup Consumers should never be null.", available );
assertTrue( "Selected Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetSelectedUnprocessedConsumers()
throws Exception
{

View File

@ -1,98 +0,0 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.List;
/**
 * TestDatabaseCleanupConsumer
 *
 * Counting stub of {@link DatabaseCleanupConsumer}: records how many times the
 * scan lifecycle methods and artifact processing are invoked so tests can
 * assert consumer dispatch.
 *
 * @version $Id$
 */
public class TestDatabaseCleanupConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    // Invocation counters exposed through the getCount* accessors.
    private int scansStarted;

    private int scansFinished;

    private int artifactsSeen;

    public void resetCount()
    {
        scansStarted = 0;
        artifactsSeen = 0;
        scansFinished = 0;
    }

    public void beginScan()
    {
        scansStarted++;
    }

    public void completeScan()
    {
        scansFinished++;
    }

    public List getIncludedTypes()
    {
        return null;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        artifactsSeen++;
    }

    public String getDescription()
    {
        return "Test Consumer for Database Cleanup";
    }

    public String getId()
    {
        return "test-db-cleanup";
    }

    public boolean isPermanent()
    {
        return false;
    }

    public int getCountBegin()
    {
        return scansStarted;
    }

    public int getCountComplete()
    {
        return scansFinished;
    }

    public int getCountProcessed()
    {
        return artifactsSeen;
    }
}

View File

@ -15,12 +15,6 @@
</configuration>
</component>
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
<role-hint>test-db-cleanup</role-hint>
<implementation>org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer</implementation>
</component>
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>

View File

@ -73,16 +73,6 @@ public class ArchivaDatabaseUpdateTaskExecutor
throw new TaskExecutionException( "Error running unprocessed updater", e );
}
try
{
log.info( "Task: Updating processed artifacts" );
databaseUpdater.updateAllProcessed();
}
catch ( ArchivaDatabaseException e )
{
throw new TaskExecutionException( "Error running processed updater", e );
}
time = System.currentTimeMillis() - time;
log.info( "Finished database task in " + time + "ms." );

View File

@ -1,99 +0,0 @@
package org.apache.maven.archiva.scheduled;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.List;
/**
 * TestDatabaseCleanupConsumer
 *
 * Counting stub of {@link DatabaseCleanupConsumer}: records how many times the
 * scan lifecycle methods and artifact processing are invoked so tests can
 * assert consumer dispatch.
 *
 * @version $Id$
 */
public class TestDatabaseCleanupConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    // Invocation counters exposed through the getCount* accessors.
    private int scansStarted;

    private int scansFinished;

    private int artifactsSeen;

    public void resetCount()
    {
        scansStarted = 0;
        artifactsSeen = 0;
        scansFinished = 0;
    }

    public void beginScan()
    {
        scansStarted++;
    }

    public void completeScan()
    {
        scansFinished++;
    }

    public List getIncludedTypes()
    {
        return null;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        artifactsSeen++;
    }

    public String getDescription()
    {
        return "Test Consumer for Database Cleanup";
    }

    public String getId()
    {
        return "test-db-cleanup";
    }

    public boolean isPermanent()
    {
        return false;
    }

    public int getCountBegin()
    {
        return scansStarted;
    }

    public int getCountComplete()
    {
        return scansFinished;
    }

    public int getCountProcessed()
    {
        return artifactsSeen;
    }
}

View File

@ -66,12 +66,6 @@
</configuration>
</component>
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer</role>
<role-hint>test-db-cleanup</role-hint>
<implementation>org.apache.maven.archiva.scheduled.TestDatabaseCleanupConsumer</implementation>
</component>
<component>
<role>org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>

View File

@ -19,7 +19,9 @@ package org.apache.maven.archiva.web.action.admin.database;
* under the License.
*/
import com.opensymphony.xwork2.Preparable;
import java.util.Collections;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
@ -27,17 +29,16 @@ import org.apache.maven.archiva.configuration.DatabaseScanningConfiguration;
import org.apache.maven.archiva.configuration.IndeterminateConfigurationException;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.apache.maven.archiva.web.action.admin.scanning.AdminRepositoryConsumerComparator;
import org.apache.maven.archiva.web.action.PlexusActionSupport;
import org.apache.maven.archiva.web.action.admin.scanning.AdminRepositoryConsumerComparator;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.plexus.registry.RegistryException;
import java.util.Collections;
import java.util.List;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import com.opensymphony.xwork2.Preparable;
/**
* DatabaseAction
*
@ -70,16 +71,6 @@ public class DatabaseAction
*/
private List enabledUnprocessedConsumers;
/**
* List of {@link AdminDatabaseConsumer} objects for "to cleanup" artifacts.
*/
private List cleanupConsumers;
/**
* List of enabled {@link AdminDatabaseConsumer} objects for "to cleanup" artifacts.
*/
private List enabledCleanupConsumers;
public void prepare()
throws Exception
{
@ -94,11 +85,6 @@ public class DatabaseAction
CollectionUtils.forAllDo( databaseConsumers.getAvailableUnprocessedConsumers(), addAdminDbConsumer );
this.unprocessedConsumers = addAdminDbConsumer.getList();
Collections.sort( this.unprocessedConsumers, AdminRepositoryConsumerComparator.getInstance() );
addAdminDbConsumer = new AddAdminDatabaseConsumerClosure( dbscanning.getCleanupConsumers() );
CollectionUtils.forAllDo( databaseConsumers.getAvailableCleanupConsumers(), addAdminDbConsumer );
this.cleanupConsumers = addAdminDbConsumer.getList();
Collections.sort( this.cleanupConsumers, AdminRepositoryConsumerComparator.getInstance() );
}
public String updateUnprocessedConsumers()
@ -109,13 +95,6 @@ public class DatabaseAction
return saveConfiguration();
}
public String updateCleanupConsumers()
{
archivaConfiguration.getConfiguration().getDatabaseScanning().setCleanupConsumers( enabledCleanupConsumers );
return saveConfiguration();
}
public String updateSchedule()
{
archivaConfiguration.getConfiguration().getDatabaseScanning().setCronExpression( cron );
@ -166,11 +145,6 @@ public class DatabaseAction
this.cron = cron;
}
public List getCleanupConsumers()
{
return cleanupConsumers;
}
public List getUnprocessedConsumers()
{
return unprocessedConsumers;
@ -185,14 +159,4 @@ public class DatabaseAction
{
this.enabledUnprocessedConsumers = enabledUnprocessedConsumers;
}
public List getEnabledCleanupConsumers()
{
return enabledCleanupConsumers;
}
public void setEnabledCleanupConsumers( List enabledCleanupConsumers )
{
this.enabledCleanupConsumers = enabledCleanupConsumers;
}
}

View File

@ -116,62 +116,6 @@
</c:otherwise>
</c:choose>
<h2>Database - Artifact Cleanup Scanning</h2>
<c:choose>
<c:when test="${empty (cleanupConsumers)}">
<%-- No Consumers. Eeek! --%>
<strong>There are no consumers for artifact cleanup.</strong>
</c:when>
<c:otherwise>
<%-- Display the consumers. --%>
<s:form method="post" action="database!updateCleanupConsumers"
namespace="/admin" validate="false" theme="simple">
<table class="consumers">
<tr>
<th>&nbsp;</th>
<th>Enabled?</th>
<th>ID</th>
<th>Description</th>
</tr>
<c:forEach items="${cleanupConsumers}" var="consumer" varStatus="i">
<c:choose>
<c:when test='${(i.index)%2 eq 0}'>
<c:set var="bgcolor" value="even" scope="page" />
</c:when>
<c:otherwise>
<c:set var="bgcolor" value="odd" scope="page" />
</c:otherwise>
</c:choose>
<tr>
<td class="${bgcolor}">
<input type="checkbox" name="enabledCleanupConsumers" theme="simple" value="${consumer.id}" <c:if test="${consumer.enabled}">checked</c:if> />
</td>
<td class="${bgcolor}">
<c:if test="${consumer.enabled}">
<strong>enabled</strong>
</c:if>
</td>
<td class="${bgcolor}">
<code>${consumer.id}</code>
</td>
<td class="${bgcolor}">${consumer.description}</td>
</tr>
</c:forEach>
<tr>
<td colspan="4">
<s:submit value="Update Consumers" />
</td>
</tr>
</table>
</s:form>
</c:otherwise>
</c:choose>
</div>
</div>
</body>

View File

@ -39,7 +39,6 @@ import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.constraints.ArtifactVersionsConstraint;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
@ -98,24 +97,11 @@ public class AdministrationServiceImpl
*/
public Boolean configureDatabaseConsumer( String consumerId, boolean enable ) throws Exception
{
List<DatabaseCleanupConsumer> cleanupConsumers = dbConsumersUtil.getAvailableCleanupConsumers();
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers =
dbConsumersUtil.getAvailableUnprocessedConsumers();
boolean found = false;
boolean isCleanupConsumer = false;
for( DatabaseCleanupConsumer consumer : cleanupConsumers )
{
if( consumer.getId().equals( consumerId ) )
{
found = true;
isCleanupConsumer = true;
break;
}
}
if( !found )
{
for( DatabaseUnprocessedArtifactConsumer consumer : unprocessedConsumers )
{
if( consumer.getId().equals( consumerId ) )
@ -124,7 +110,6 @@ public class AdministrationServiceImpl
break;
}
}
}
if( !found )
{
@ -134,14 +119,7 @@ public class AdministrationServiceImpl
Configuration config = archivaConfiguration.getConfiguration();
DatabaseScanningConfiguration dbScanningConfig = config.getDatabaseScanning();
if( isCleanupConsumer )
{
dbScanningConfig.addCleanupConsumer( consumerId );
}
else
{
dbScanningConfig.addUnprocessedConsumer( consumerId );
}
config.setDatabaseScanning( dbScanningConfig );
saveConfiguration( config );
@ -329,14 +307,8 @@ public class AdministrationServiceImpl
{
List<String> consumers = new ArrayList<String>();
List<DatabaseCleanupConsumer> cleanupConsumers = dbConsumersUtil.getAvailableCleanupConsumers();
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers = dbConsumersUtil.getAvailableUnprocessedConsumers();
for( DatabaseCleanupConsumer consumer : cleanupConsumers )
{
consumers.add( consumer.getId() );
}
for( DatabaseUnprocessedArtifactConsumer consumer : unprocessedConsumers )
{
consumers.add( consumer.getId() );

View File

@ -40,7 +40,6 @@ import org.apache.maven.archiva.configuration.RepositoryScanningConfiguration;
import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.database.updater.DatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.DatabaseConsumers;
import org.apache.maven.archiva.database.updater.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
@ -122,12 +121,6 @@ public class AdministrationServiceImplTest
private RepositoryListener listener;
private DatabaseCleanupConsumer cleanupIndexConsumer;
private DatabaseCleanupConsumer cleanupDbConsumer;
private MockControl cleanupConsumersControl;
protected void setUp()
throws Exception
{
@ -158,10 +151,6 @@ public class AdministrationServiceImplTest
dbConsumersUtilControl = MockClassControl.createControl( DatabaseConsumers.class );
dbConsumersUtil = ( DatabaseConsumers ) dbConsumersUtilControl.getMock();
cleanupConsumersControl = MockControl.createControl( DatabaseCleanupConsumer.class );
cleanupIndexConsumer = (DatabaseCleanupConsumer) cleanupConsumersControl.getMock();
cleanupDbConsumer = (DatabaseCleanupConsumer) cleanupConsumersControl.getMock();
unprocessedConsumersControl = MockControl.createControl( DatabaseUnprocessedArtifactConsumer.class );
processArtifactConsumer = ( DatabaseUnprocessedArtifactConsumer ) unprocessedConsumersControl.getMock();
processPomConsumer = ( DatabaseUnprocessedArtifactConsumer ) unprocessedConsumersControl.getMock();
@ -189,19 +178,15 @@ public class AdministrationServiceImplTest
recordDbConsumers();
dbConsumersUtilControl.replay();
cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
List<String> dbConsumers = service.getAllDatabaseConsumers();
dbConsumersUtilControl.verify();
cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
assertNotNull( dbConsumers );
assertEquals( 4, dbConsumers.size() );
assertTrue( dbConsumers.contains( "cleanup-index" ) );
assertTrue( dbConsumers.contains( "cleanup-database" ) );
assertEquals( 2, dbConsumers.size() );
assertTrue( dbConsumers.contains( "process-artifact" ) );
assertTrue( dbConsumers.contains( "process-pom" ) );
}
@ -228,7 +213,6 @@ public class AdministrationServiceImplTest
archivaConfigControl.setVoidCallable();
dbConsumersUtilControl.replay();
cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
archivaConfigControl.replay();
configControl.replay();
@ -244,14 +228,12 @@ public class AdministrationServiceImplTest
}
dbConsumersUtilControl.verify();
cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
archivaConfigControl.verify();
configControl.verify();
// test disable "process-pom" db consumer
dbConsumersUtilControl.reset();
cleanupConsumersControl.reset();
unprocessedConsumersControl.reset();
archivaConfigControl.reset();
configControl.reset();
@ -271,7 +253,6 @@ public class AdministrationServiceImplTest
archivaConfigControl.setVoidCallable();
dbConsumersUtilControl.replay();
cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
archivaConfigControl.replay();
configControl.replay();
@ -287,7 +268,6 @@ public class AdministrationServiceImplTest
}
dbConsumersUtilControl.verify();
cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
archivaConfigControl.verify();
configControl.verify();
@ -299,7 +279,6 @@ public class AdministrationServiceImplTest
recordDbConsumers();
dbConsumersUtilControl.replay();
cleanupConsumersControl.replay();
unprocessedConsumersControl.replay();
try
@ -313,7 +292,6 @@ public class AdministrationServiceImplTest
}
dbConsumersUtilControl.verify();
cleanupConsumersControl.verify();
unprocessedConsumersControl.verify();
}
@ -875,19 +853,11 @@ public class AdministrationServiceImplTest
private void recordDbConsumers()
{
List<DatabaseCleanupConsumer> cleanupConsumers = new ArrayList<DatabaseCleanupConsumer>();
cleanupConsumers.add( cleanupIndexConsumer );
cleanupConsumers.add( cleanupDbConsumer );
List<DatabaseUnprocessedArtifactConsumer> unprocessedConsumers =
new ArrayList<DatabaseUnprocessedArtifactConsumer>();
unprocessedConsumers.add( processArtifactConsumer );
unprocessedConsumers.add( processPomConsumer );
dbConsumersUtilControl.expectAndReturn( dbConsumersUtil.getAvailableCleanupConsumers(), cleanupConsumers );
cleanupConsumersControl.expectAndReturn( cleanupIndexConsumer.getId(), "cleanup-index" );
cleanupConsumersControl.expectAndReturn( cleanupDbConsumer.getId(), "cleanup-database" );
dbConsumersUtilControl.expectAndReturn( dbConsumersUtil.getAvailableUnprocessedConsumers(), unprocessedConsumers );
unprocessedConsumersControl.expectAndReturn( processArtifactConsumer.getId(), "process-artifact" );
unprocessedConsumersControl.expectAndReturn( processPomConsumer.getId(), "process-pom" );