[MRM-346]: Show Artifact results in error 500.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@539145 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Joakim Erdfelt 2007-05-17 22:16:57 +00:00
parent 45ab16622e
commit 2ae3268ff9
31 changed files with 1202 additions and 276 deletions

View File

@ -65,22 +65,21 @@
<groupId>org.codehaus.plexus.registry</groupId> <groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-api</artifactId> <artifactId>plexus-registry-api</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-container-default</artifactId>
</dependency>
<!-- Test Deps -->
<dependency> <dependency>
<groupId>org.codehaus.plexus.registry</groupId> <groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId> <artifactId>plexus-registry-commons</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- Test Deps -->
<dependency> <dependency>
<groupId>easymock</groupId> <groupId>easymock</groupId>
<artifactId>easymock</artifactId> <artifactId>easymock</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-container-default</artifactId>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>
<plugins> <plugins>

View File

@ -25,9 +25,8 @@ import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils; import org.apache.commons.lang.math.NumberUtils;
import org.apache.maven.archiva.xml.XMLException; import org.apache.maven.archiva.xml.XMLException;
import org.apache.maven.archiva.xml.XMLReader; import org.apache.maven.archiva.xml.XMLReader;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.logging.console.ConsoleLogger;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import java.io.File; import java.io.File;
import java.io.FileOutputStream; import java.io.FileOutputStream;
@ -42,27 +41,28 @@ import java.net.URL;
* *
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a> * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$ * @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.configuration.ConfigurationUpgrade"
* role-hint="default"
*/ */
public class ConfigurationUpgrade public class ConfigurationUpgrade
extends AbstractLogEnabled
implements Initializable
{ {
public static final int CURRENT_CONFIG_VERSION = 1; public static final int CURRENT_CONFIG_VERSION = 1;
/* NOTE: This component should *NOT USE* the configuration api to do it's upgrade */ private Logger logger;
public void initialize() /**
throws InitializationException * Perform the upgrade (if needed).
*
* NOTE: This component should *NOT USE* the configuration api to do it's upgrade
*
* @return true if the upgrade modified the archiva.xml file. false otherwise.
*/
public boolean perform()
{ {
File userConfigFile = new File( System.getProperty( "user.home" ), ".m2/archiva.xml" ); File userConfigFile = new File( System.getProperty( "user.home" ), ".m2/archiva.xml" );
if ( !userConfigFile.exists() ) if ( !userConfigFile.exists() )
{ {
writeDefaultConfigFile( userConfigFile ); writeDefaultConfigFile( userConfigFile );
return; return true;
} }
boolean configOk = false; boolean configOk = false;
@ -85,6 +85,7 @@ public class ConfigurationUpgrade
catch ( XMLException e ) catch ( XMLException e )
{ {
getLogger().warn( "Unable to read user configuration XML: " + e.getMessage(), e ); getLogger().warn( "Unable to read user configuration XML: " + e.getMessage(), e );
return false;
} }
if ( !configOk ) if ( !configOk )
@ -93,14 +94,15 @@ public class ConfigurationUpgrade
{ {
FileUtils.copyFile( userConfigFile, new File( userConfigFile.getAbsolutePath() + ".bak" ) ); FileUtils.copyFile( userConfigFile, new File( userConfigFile.getAbsolutePath() + ".bak" ) );
writeDefaultConfigFile( userConfigFile ); writeDefaultConfigFile( userConfigFile );
return true;
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger().warn( "Unable to create backup of your configuration file: " getLogger().warn( "Unable to create backup of your configuration file: " + e.getMessage(), e );
+ e.getMessage(), e );
} }
} }
return false;
} }
private void upgradeVersion( File userConfigFile, XMLReader xml ) private void upgradeVersion( File userConfigFile, XMLReader xml )
@ -126,8 +128,7 @@ public class ConfigurationUpgrade
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger().warn( "Unable to write default (generic) configuration file: " getLogger().warn( "Unable to write default (generic) configuration file: " + e.getMessage(), e );
+ e.getMessage(), e );
} }
} }
@ -147,4 +148,18 @@ public class ConfigurationUpgrade
} }
} }
public Logger getLogger()
{
if ( logger == null )
{
logger = new ConsoleLogger( ConsoleLogger.LEVEL_INFO, this.getClass().getName() );
}
return logger;
}
public void setLogger( Logger logger )
{
this.logger = logger;
}
} }

View File

@ -21,6 +21,7 @@ package org.apache.maven.archiva.configuration;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader; import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryWriter; import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryWriter;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
@ -36,6 +37,7 @@ import java.util.Iterator;
* @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration" * @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration"
*/ */
public class DefaultArchivaConfiguration public class DefaultArchivaConfiguration
extends AbstractLogEnabled
implements ArchivaConfiguration, RegistryListener, Initializable implements ArchivaConfiguration, RegistryListener, Initializable
{ {
/** /**
@ -56,19 +58,26 @@ public class DefaultArchivaConfiguration
{ {
if ( configuration == null ) if ( configuration == null )
{ {
// TODO: should this be the same as section? make sure unnamed sections still work (eg, sys properties) configuration = load();
configuration = new ConfigurationRegistryReader().read( registry.getSubset( KEY ) );
// TODO: for commons-configuration 1.3 only
for ( Iterator i = configuration.getRepositories().iterator(); i.hasNext(); )
{
RepositoryConfiguration c = (RepositoryConfiguration) i.next();
c.setUrl( removeExpressions( c.getUrl() ) );
}
} }
return configuration; return configuration;
} }
private Configuration load()
{
// TODO: should this be the same as section? make sure unnamed sections still work (eg, sys properties)
Configuration config = new ConfigurationRegistryReader().read( registry.getSubset( KEY ) );
// TODO: for commons-configuration 1.3 only
for ( Iterator i = config.getRepositories().iterator(); i.hasNext(); )
{
RepositoryConfiguration c = (RepositoryConfiguration) i.next();
c.setUrl( removeExpressions( c.getUrl() ) );
}
return config;
}
public void save( Configuration configuration ) public void save( Configuration configuration )
throws RegistryException throws RegistryException
{ {
@ -101,6 +110,13 @@ public class DefaultArchivaConfiguration
throws InitializationException throws InitializationException
{ {
registry.addChangeListener( this ); registry.addChangeListener( this );
ConfigurationUpgrade upgrade = new ConfigurationUpgrade();
upgrade.setLogger( getLogger() );
if ( upgrade.perform() )
{
this.configuration = load();
}
} }
public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue ) public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
@ -115,10 +131,10 @@ public class DefaultArchivaConfiguration
private String removeExpressions( String directory ) private String removeExpressions( String directory )
{ {
String value = StringUtils.replace( directory, "${appserver.base}", String value = StringUtils.replace( directory, "${appserver.base}", registry.getString( "appserver.base",
registry.getString( "appserver.base", "${appserver.base}" ) ); "${appserver.base}" ) );
value = StringUtils.replace( value, "${appserver.home}", value = StringUtils.replace( value, "${appserver.home}", registry.getString( "appserver.home",
registry.getString( "appserver.home", "${appserver.home}" ) ); "${appserver.home}" ) );
return value; return value;
} }

View File

@ -0,0 +1,10 @@
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=INFO, A1
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
# A1 uses PatternLayout.
log4j.appender.A1.layout=org.apache.log4j.PatternLayout
log4j.appender.A1.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n

View File

@ -49,6 +49,10 @@
<groupId>commons-lang</groupId> <groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId> <artifactId>commons-lang</artifactId>
</dependency> </dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency> <dependency>
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
<artifactId>commons-io</artifactId> <artifactId>commons-io</artifactId>

View File

@ -0,0 +1,59 @@
package org.apache.maven.archiva.model.functors;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.collections.Predicate;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
/**
* Allows for selection of unprocessed artifacts.
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class UnprocessedArtifactPredicate
implements Predicate
{
private static UnprocessedArtifactPredicate INSTANCE = new UnprocessedArtifactPredicate();
public static UnprocessedArtifactPredicate getInstance()
{
return INSTANCE;
}
public boolean evaluate( Object object )
{
boolean satisfies = false;
if ( object instanceof ArchivaArtifact )
{
ArchivaArtifact artifact = (ArchivaArtifact) object;
satisfies = !artifact.getModel().isProcessed();
}
else if ( object instanceof ArchivaArtifactModel )
{
ArchivaArtifactModel model = (ArchivaArtifactModel) object;
satisfies = !model.isProcessed();
}
return satisfies;
}
}

View File

@ -98,6 +98,11 @@
<artifactId>derby</artifactId> <artifactId>derby</artifactId>
</dependency> </dependency>
<!-- TEST DEPS --> <!-- TEST DEPS -->
<dependency>
<groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>hsqldb</groupId> <groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId> <artifactId>hsqldb</artifactId>

View File

@ -25,6 +25,8 @@ import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint; import org.apache.maven.archiva.database.constraints.UniqueArtifactIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint; import org.apache.maven.archiva.database.constraints.UniqueGroupIdConstraint;
import org.apache.maven.archiva.database.constraints.UniqueVersionConstraint; import org.apache.maven.archiva.database.constraints.UniqueVersionConstraint;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel; import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.logging.AbstractLogEnabled;
@ -37,7 +39,6 @@ import java.util.List;
* @version $Id$ * @version $Id$
* *
* @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing" * @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing"
* role-hint="default"
*/ */
public class DefaultRepositoryBrowsing public class DefaultRepositoryBrowsing
extends AbstractLogEnabled extends AbstractLogEnabled
@ -48,6 +49,11 @@ public class DefaultRepositoryBrowsing
*/ */
private ArchivaDAO dao; private ArchivaDAO dao;
/**
* @plexus.requirement role-hint="jdo"
*/
private DatabaseUpdater dbUpdater;
public BrowsingResults getRoot() public BrowsingResults getRoot()
{ {
List groups = dao.query( new UniqueGroupIdConstraint() ); List groups = dao.query( new UniqueGroupIdConstraint() );
@ -89,10 +95,51 @@ public class DefaultRepositoryBrowsing
public ArchivaProjectModel selectVersion( String groupId, String artifactId, String version ) public ArchivaProjectModel selectVersion( String groupId, String artifactId, String version )
throws ObjectNotFoundException, ArchivaDatabaseException throws ObjectNotFoundException, ArchivaDatabaseException
{ {
ArchivaProjectModel model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version ); ArchivaArtifact pomArtifact = null;
// TODO: if the model isn't found. load it from disk, insert into DB, and then return it. try
{
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, null, "pom" );
return model; if ( pomArtifact == null )
{
throw new ObjectNotFoundException( "Unable to find artifact [" + groupId + ":" + artifactId + ":"
+ version + "]" );
}
}
catch ( ObjectNotFoundException e )
{
throw e;
}
ArchivaProjectModel model;
if ( pomArtifact.getModel().isProcessed() )
{
// It's been processed. return it.
model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
return model;
}
// Process it.
dbUpdater.updateUnprocessed( pomArtifact );
// Find it.
try
{
model = dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version );
if ( model == null )
{
throw new ObjectNotFoundException( "Unable to find project model for [" + groupId + ":" + artifactId + ":"
+ version + "]" );
}
return model;
}
catch ( ObjectNotFoundException e )
{
throw e;
}
} }
} }

View File

@ -19,26 +19,21 @@ package org.apache.maven.archiva.database.updater;
* under the License. * under the License.
*/ */
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.DatabaseScanningConfiguration; import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.functors.NotPredicate;
import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer; import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.database.ArchivaDAO; import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException; import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint; import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.functors.UnprocessedArtifactPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* JdoDatabaseUpdater * JdoDatabaseUpdater
@ -51,7 +46,7 @@ import java.util.Map;
*/ */
public class JdoDatabaseUpdater public class JdoDatabaseUpdater
extends AbstractLogEnabled extends AbstractLogEnabled
implements DatabaseUpdater, RegistryListener, Initializable implements DatabaseUpdater
{ {
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
@ -61,28 +56,9 @@ public class JdoDatabaseUpdater
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
private ArchivaConfiguration configuration; private DatabaseConsumers dbConsumers;
/** private ProcessArchivaArtifactClosure processArtifactClosure = new ProcessArchivaArtifactClosure();
* The collection of available consumers.
* @plexus.requirement role="org.apache.maven.archiva.consumers.ArchivaArtifactConsumer"
*/
private Map availableConsumers;
/**
* The list of active consumers for unprocessed content.
*/
private List activeUnprocessedConsumers = new ArrayList();
/**
* The list of active consumers for processed content.
*/
private List activeProcessedConsumers = new ArrayList();
/**
* The list of registry (configuration) property names that will trigger a refresh of the activeConsumers list.
*/
private List propertyNameTriggers = new ArrayList();
public void update() public void update()
throws ArchivaDatabaseException throws ArchivaDatabaseException
@ -96,56 +72,52 @@ public class JdoDatabaseUpdater
{ {
List unprocessedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( false ) ); List unprocessedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( false ) );
beginConsumerLifecycle( this.activeUnprocessedConsumers ); beginConsumerLifecycle( dbConsumers.getSelectedUnprocessedConsumers() );
try try
{ {
// Process each consumer. // Process each consumer.
Iterator it = unprocessedArtifacts.iterator(); Predicate predicate = UnprocessedArtifactPredicate.getInstance();
Iterator it = IteratorUtils.filteredIterator( unprocessedArtifacts.iterator(), predicate );
while ( it.hasNext() ) while ( it.hasNext() )
{ {
ArchivaArtifact artifact = (ArchivaArtifact) it.next(); ArchivaArtifact artifact = (ArchivaArtifact) it.next();
updateUnprocessed( artifact );
if ( !artifact.getModel().isProcessed() )
{
updateUnprocessed( artifact );
}
} }
} }
finally finally
{ {
consumerConsumerLifecycle( this.activeUnprocessedConsumers ); endConsumerLifecycle( dbConsumers.getSelectedUnprocessedConsumers() );
} }
} }
public void updateAllProcessed() public void updateAllProcessed()
throws ArchivaDatabaseException throws ArchivaDatabaseException
{ {
List processedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( true ) ); List processedArtifacts = dao.getArtifactDAO().queryArtifacts( new ArtifactsProcessedConstraint( true ) );
beginConsumerLifecycle( this.activeProcessedConsumers ); beginConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
try try
{ {
// Process each consumer. // Process each consumer.
Iterator it = processedArtifacts.iterator(); Predicate predicate = NotPredicate.getInstance( UnprocessedArtifactPredicate.getInstance() );
Iterator it = IteratorUtils.filteredIterator( processedArtifacts.iterator(), predicate );
while ( it.hasNext() ) while ( it.hasNext() )
{ {
ArchivaArtifact artifact = (ArchivaArtifact) it.next(); ArchivaArtifact artifact = (ArchivaArtifact) it.next();
updateProcessed( artifact );
if ( !artifact.getModel().isProcessed() )
{
updateProcessed( artifact );
}
} }
} }
finally finally
{ {
consumerConsumerLifecycle( this.activeProcessedConsumers ); endConsumerLifecycle( dbConsumers.getSelectedCleanupConsumers() );
} }
} }
private void consumerConsumerLifecycle( List consumers ) private void endConsumerLifecycle( List consumers )
{ {
Iterator it = consumers.iterator(); Iterator it = consumers.iterator();
while ( it.hasNext() ) while ( it.hasNext() )
@ -168,19 +140,16 @@ public class JdoDatabaseUpdater
public void updateUnprocessed( ArchivaArtifact artifact ) public void updateUnprocessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException throws ArchivaDatabaseException
{ {
Iterator it = this.activeUnprocessedConsumers.iterator(); List consumers = dbConsumers.getSelectedUnprocessedConsumers();
while ( it.hasNext() )
if ( CollectionUtils.isEmpty( consumers ) )
{ {
ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) it.next(); getLogger().warn( "There are no selected consumers for unprocessed artifacts." );
try return;
{
consumer.processArchivaArtifact( artifact );
}
catch ( ConsumerException e )
{
getLogger().warn( "Unable to consume (unprocessed) artifact: " + artifact );
}
} }
this.processArtifactClosure.setArtifact( artifact );
CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
artifact.getModel().setWhenProcessed( new Date() ); artifact.getModel().setWhenProcessed( new Date() );
dao.getArtifactDAO().saveArtifact( artifact ); dao.getArtifactDAO().saveArtifact( artifact );
@ -189,86 +158,15 @@ public class JdoDatabaseUpdater
public void updateProcessed( ArchivaArtifact artifact ) public void updateProcessed( ArchivaArtifact artifact )
throws ArchivaDatabaseException throws ArchivaDatabaseException
{ {
Iterator it = this.activeProcessedConsumers.iterator(); List consumers = dbConsumers.getSelectedCleanupConsumers();
while ( it.hasNext() )
{
ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) it.next();
try
{
consumer.processArchivaArtifact( artifact );
}
catch ( ConsumerException e )
{
getLogger().warn( "Unable to consume (processed) artifact: " + artifact );
}
}
}
private void updateActiveConsumers() if ( CollectionUtils.isEmpty( consumers ) )
{
this.activeUnprocessedConsumers.clear();
this.activeProcessedConsumers.clear();
DatabaseScanningConfiguration dbScanning = configuration.getConfiguration().getDatabaseScanning();
if ( dbScanning == null )
{ {
getLogger().error( "No Database Consumers found!" ); getLogger().warn( "There are no selected consumers for artifact cleanup." );
return; return;
} }
this.activeUnprocessedConsumers.addAll( getActiveConsumerList( dbScanning.getUnprocessedConsumers() ) ); this.processArtifactClosure.setArtifact( artifact );
this.activeProcessedConsumers.addAll( getActiveConsumerList( dbScanning.getCleanupConsumers() ) ); CollectionUtils.forAllDo( consumers, this.processArtifactClosure );
}
private List getActiveConsumerList( List potentialConsumerList )
{
if ( ( potentialConsumerList == null ) || ( potentialConsumerList.isEmpty() ) )
{
return Collections.EMPTY_LIST;
}
List ret = new ArrayList();
Iterator it = potentialConsumerList.iterator();
while ( it.hasNext() )
{
String consumerName = (String) it.next();
if ( !availableConsumers.containsKey( consumerName ) )
{
getLogger().warn( "Requested Consumer [" + consumerName + "] does not exist. Disabling." );
continue;
}
ret.add( consumerName );
}
return ret;
}
public void initialize()
throws InitializationException
{
propertyNameTriggers = new ArrayList();
propertyNameTriggers.add( "databaseScanning" );
propertyNameTriggers.add( "unprocessedConsumers" );
propertyNameTriggers.add( "unprocessedConsumer" );
propertyNameTriggers.add( "processedConsumers" );
propertyNameTriggers.add( "processedConsumer" );
configuration.addChangeListener( this );
updateActiveConsumers();
}
public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
if ( propertyNameTriggers.contains( propertyName ) )
{
updateActiveConsumers();
}
}
public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
{
/* nothing to do here */
} }
} }

View File

@ -0,0 +1,73 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.logging.AbstractLogEnabled;
/**
* ProcessArchivaArtifactClosure
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.apache.commons.collections.Closure"
* role-hint="process-artifact"
* instantiation-strategy="per-lookup"
*/
class ProcessArchivaArtifactClosure
extends AbstractLogEnabled
implements Closure
{
private ArchivaArtifact artifact;
public void execute( Object input )
{
if ( input instanceof ArchivaArtifactConsumer )
{
ArchivaArtifactConsumer consumer = (ArchivaArtifactConsumer) input;
try
{
consumer.processArchivaArtifact( artifact );
}
catch ( ConsumerException e )
{
getLogger().warn(
"Unable to process artifact [" + artifact + "] with consumer [" + consumer.getId()
+ "]" );
}
}
}
public ArchivaArtifact getArtifact()
{
return artifact;
}
public void setArtifact( ArchivaArtifact artifact )
{
this.artifact = artifact;
}
}

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.database;
* under the License. * under the License.
*/ */
import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer;
import org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer;
import org.codehaus.plexus.PlexusTestCase; import org.codehaus.plexus.PlexusTestCase;
import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory; import org.codehaus.plexus.jdo.DefaultConfigurableJdoFactory;
import org.codehaus.plexus.jdo.JdoFactory; import org.codehaus.plexus.jdo.JdoFactory;
@ -127,6 +131,25 @@ public class AbstractArchivaDatabaseTestCase
this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" ); this.dao = (ArchivaDAO) lookup( ArchivaDAO.class.getName(), "jdo" );
} }
protected TestDatabaseCleanupConsumer lookupTestCleanupConsumer()
throws Exception
{
TestDatabaseCleanupConsumer consumer = (TestDatabaseCleanupConsumer) lookup( DatabaseCleanupConsumer.class,
"test-db-cleanup" );
assertNotNull( "Test Database Cleanup Consumer should not be null.", consumer );
return consumer;
}
protected TestDatabaseUnprocessedConsumer lookupTestUnprocessedConsumer()
throws Exception
{
TestDatabaseUnprocessedConsumer consumer = (TestDatabaseUnprocessedConsumer) lookup(
DatabaseUnprocessedArtifactConsumer.class,
"test-db-unprocessed" );
assertNotNull( "Test Database Unprocessed Consumer should not be null.", consumer );
return consumer;
}
protected Date toDate( String txt ) protected Date toDate( String txt )
throws Exception throws Exception
{ {

View File

@ -94,7 +94,7 @@ public class RepositoryBrowsingTest
public RepositoryBrowsing lookupBrowser() public RepositoryBrowsing lookupBrowser()
throws Exception throws Exception
{ {
RepositoryBrowsing browser = (RepositoryBrowsing) lookup( RepositoryBrowsing.class.getName(), "default" ); RepositoryBrowsing browser = (RepositoryBrowsing) lookup( RepositoryBrowsing.class.getName() );
assertNotNull( "RepositoryBrowsing should not be null.", browser ); assertNotNull( "RepositoryBrowsing should not be null.", browser );
return browser; return browser;
} }

View File

@ -0,0 +1,84 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.collections.CollectionUtils;
import org.codehaus.plexus.PlexusTestCase;
import java.util.List;
/**
* DatabaseConsumersTest
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class DatabaseConsumersTest
extends PlexusTestCase
{
private DatabaseConsumers lookupDbConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = (DatabaseConsumers) lookup( DatabaseConsumers.class );
assertNotNull( "DatabaseConsumers should not be null.", dbconsumers );
return dbconsumers;
}
public void testGetAvailableCleanupConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getAvailableCleanupConsumers();
assertNotNull( "Available Cleanup Consumers should never be null.", available );
assertTrue( "Available Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetAvailableUnprocessedConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getAvailableUnprocessedConsumers();
assertNotNull( "Available Unprocessed Consumers should never be null.", available );
assertTrue( "Available Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetSelectedCleanupConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getSelectedCleanupConsumers();
assertNotNull( "Selected Cleanup Consumers should never be null.", available );
assertTrue( "Selected Cleanup Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
public void testGetSelectedUnprocessedConsumers()
throws Exception
{
DatabaseConsumers dbconsumers = lookupDbConsumers();
List available = dbconsumers.getSelectedUnprocessedConsumers();
assertNotNull( "Selected Unprocessed Consumers should never be null.", available );
assertTrue( "Selected Unprocessed Consumers should have entries.", CollectionUtils.isNotEmpty( available ) );
}
}

View File

@ -0,0 +1,103 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.database.AbstractArchivaDatabaseTestCase;
import org.apache.maven.archiva.database.ArtifactDAO;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.Date;
/**
* DatabaseUpdaterTest
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
/**
 * Exercises DatabaseUpdater.updateUnprocessed() against a small set of
 * artifacts saved into the test database during setUp().
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class DatabaseUpdaterTest
    extends AbstractArchivaDatabaseTestCase
{
    // Component under test, looked up with the "jdo" role-hint in setUp().
    private DatabaseUpdater dbupdater;

    /**
     * Creates (but does not save) a test artifact of type jar.
     *
     * @param whenProcessed processed timestamp string, or null to mark the
     *                      artifact as not-yet-processed.
     * @return the created artifact, never null.
     * @throws Exception if the artifact could not be created.
     */
    public ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String whenProcessed )
        throws Exception
    {
        ArchivaArtifact artifact = dao.getArtifactDAO().createArtifact( groupId, artifactId, version, "", "jar" );
        assertNotNull( "Artifact should not be null.", artifact );

        Date dateWhenProcessed = ( whenProcessed == null ) ? null : toDate( whenProcessed );
        artifact.getModel().setWhenProcessed( dateWhenProcessed );

        // Satisfy table / column requirements.
        artifact.getModel().setLastModified( new Date() );

        return artifact;
    }

    protected void setUp()
        throws Exception
    {
        super.setUp();

        ArtifactDAO artifactDao = dao.getArtifactDAO();
        assertNotNull( "Artifact DAO should not be null.", artifactDao );

        // Three unprocessed artifacts and one already-processed artifact.
        artifactDao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-common", "1.0-SNAPSHOT", null ) );
        artifactDao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-utils", "1.0-SNAPSHOT", null ) );
        artifactDao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-old", "0.1", "2004/02/15 9:01:00" ) );
        artifactDao.saveArtifact( createArtifact( "org.apache.maven.archiva", "archiva-database", "1.0-SNAPSHOT", null ) );

        dbupdater = (DatabaseUpdater) lookup( DatabaseUpdater.class, "jdo" );
        assertNotNull( "DatabaseUpdater should not be null.", dbupdater );
    }

    public void testUpdateUnprocessed()
        throws Exception
    {
        String groupId = "org.apache.maven.archiva";
        String artifactId = "archiva-utils";
        String version = "1.0-SNAPSHOT";
        String classifier = "";
        String type = "jar";

        TestDatabaseUnprocessedConsumer consumer = lookupTestUnprocessedConsumer();
        consumer.resetCount();

        // Check the state of the artifact in the DB.
        ArchivaArtifact before = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier, type );
        assertFalse( "Artifact should not be considered processed (yet).", before.getModel().isProcessed() );

        // Update the artifact.
        dbupdater.updateUnprocessed( before );

        // Re-fetch and check the update took effect.
        ArchivaArtifact after = dao.getArtifactDAO().getArtifact( groupId, artifactId, version, classifier, type );
        assertTrue( "Artifact should be flagged as processed.", after.getModel().isProcessed() );

        // Did the unprocessed consumer do its thing?
        assertEquals( "Processed Count.", 1, consumer.getCountProcessed() );
    }
}

View File

@ -0,0 +1,100 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.List;
/**
* TestDatabaseCleanupConsumer
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
/**
 * TestDatabaseCleanupConsumer - a counting stub used by the tests to verify
 * that database cleanup scanning invokes its consumers.
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class TestDatabaseCleanupConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    // Invocation counters, one per lifecycle callback.
    private int countBegin = 0;

    private int countComplete = 0;

    private int countProcessed = 0;

    /**
     * Resets every invocation counter to zero; call between test runs.
     */
    public void resetCount()
    {
        countBegin = 0;
        countProcessed = 0;
        countComplete = 0;
    }

    public void beginScan()
    {
        countBegin++;
    }

    public void completeScan()
    {
        countComplete++;
    }

    /**
     * @return null, meaning this consumer does not restrict artifact types.
     */
    public List getIncludedTypes()
    {
        return null;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        countProcessed++;
    }

    public String getDescription()
    {
        // Was "Test Consumer for Database Unprocessed" - wrong for a cleanup consumer.
        return "Test Consumer for Database Cleanup";
    }

    public String getId()
    {
        // Must match the "test-db-cleanup" role-hint in the component descriptor
        // and the <cleanupConsumer>test-db-cleanup</cleanupConsumer> entry in
        // archiva-test.xml. The previous value "test-db-unprocessed" collided
        // with TestDatabaseUnprocessedConsumer's id.
        return "test-db-cleanup";
    }

    public boolean isPermanent()
    {
        // Test consumer; safe to deselect via configuration.
        return false;
    }

    public int getCountBegin()
    {
        return countBegin;
    }

    public int getCountComplete()
    {
        return countComplete;
    }

    public int getCountProcessed()
    {
        return countProcessed;
    }
}

View File

@ -0,0 +1,107 @@
package org.apache.maven.archiva.database.updater;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.ArrayList;
import java.util.List;
/**
* TestDatabaseUnprocessedConsumer
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
/**
 * TestDatabaseUnprocessedConsumer - a counting stub used by the tests to
 * verify that database scanning feeds unprocessed artifacts to its consumers.
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class TestDatabaseUnprocessedConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseUnprocessedArtifactConsumer
{
    // Invocation counters, one per lifecycle callback.
    private int beginCount = 0;

    private int completeCount = 0;

    private int processedCount = 0;

    /**
     * Zeroes every invocation counter; call between test runs.
     */
    public void resetCount()
    {
        this.beginCount = 0;
        this.processedCount = 0;
        this.completeCount = 0;
    }

    public void beginScan()
    {
        this.beginCount++;
    }

    public void completeScan()
    {
        this.completeCount++;
    }

    /**
     * @return the artifact types this consumer handles: pom and jar.
     */
    public List getIncludedTypes()
    {
        List includedTypes = new ArrayList();
        includedTypes.add( "pom" );
        includedTypes.add( "jar" );
        return includedTypes;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        getLogger().info( "Processing Artifact: " + artifact );
        this.processedCount++;
    }

    public String getDescription()
    {
        return "Test Consumer for Database Unprocessed";
    }

    public String getId()
    {
        return "test-db-unprocessed";
    }

    public boolean isPermanent()
    {
        // Test consumer; safe to deselect via configuration.
        return false;
    }

    public int getCountBegin()
    {
        return this.beginCount;
    }

    public int getCountComplete()
    {
        return this.completeCount;
    }

    public int getCountProcessed()
    {
        return this.processedCount;
    }
}

View File

@ -14,6 +14,18 @@
</otherProperties> </otherProperties>
</configuration> </configuration>
</component> </component>
<component>
<role>org.apache.maven.archiva.consumers.DatabaseCleanupConsumer</role>
<role-hint>test-db-cleanup</role-hint>
<implementation>org.apache.maven.archiva.database.updater.TestDatabaseCleanupConsumer</implementation>
</component>
<component>
<role>org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>
<implementation>org.apache.maven.archiva.database.updater.TestDatabaseUnprocessedConsumer</implementation>
</component>
<component> <component>
<role>org.codehaus.plexus.logging.LoggerManager</role> <role>org.codehaus.plexus.logging.LoggerManager</role>

View File

@ -60,38 +60,9 @@
</repository> </repository>
</repositories> </repositories>
<proxyConnectors> <proxyConnectors />
<proxyConnector>
<sourceRepoId>internal</sourceRepoId>
<targetRepoId>central</targetRepoId>
<proxyId />
<snapshotsPolicy>disabled</snapshotsPolicy>
<releasePolicy>never</releasePolicy>
<failurePolicy>not-found</failurePolicy>
</proxyConnector>
<proxyConnector>
<sourceRepoId>internal</sourceRepoId>
<targetRepoId>maven2-repository.dev.java.net</targetRepoId>
<proxyId />
<snapshotsPolicy>disabled</snapshotsPolicy>
<releasePolicy>never</releasePolicy>
<failurePolicy>not-found</failurePolicy>
<whiteListPatterns>
<whiteListPattern>javax/**</whiteListPattern>
</whiteListPatterns>
</proxyConnector>
</proxyConnectors>
<networkProxies> <networkProxies />
<networkProxy>
<id>example</id>
<protocol>http</protocol>
<host>proxy.mycompany.com</host>
<port>8080</port>
<username>myself</username>
<password>mypass</password>
</networkProxy>
</networkProxies>
<repositoryScanning> <repositoryScanning>
<fileTypes> <fileTypes>
@ -165,9 +136,16 @@
<databaseScanning> <databaseScanning>
<cronExpression>0 0 * * ?</cronExpression> <cronExpression>0 0 * * ?</cronExpression>
<unprocessedConsumers> <unprocessedConsumers>
<unprocessedConsumer>update-db-artifact</unprocessedConsumer> <unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
<unprocessedConsumer>index-artifact</unprocessedConsumer>
<unprocessedConsumer>update-db-project</unprocessedConsumer>
<unprocessedConsumer>validate-repository-metadata</unprocessedConsumer>
<unprocessedConsumer>index-archive-toc</unprocessedConsumer>
<unprocessedConsumer>update-db-bytecode-stats</unprocessedConsumer>
<unprocessedConsumer>index-public-methods</unprocessedConsumer>
</unprocessedConsumers> </unprocessedConsumers>
<cleanupConsumers> <cleanupConsumers>
<cleanupConsumer>test-db-cleanup</cleanupConsumer>
<cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer> <cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
<cleanupConsumer>not-present-remove-db-project</cleanupConsumer> <cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
<cleanupConsumer>not-present-remove-indexed</cleanupConsumer> <cleanupConsumer>not-present-remove-indexed</cleanupConsumer>

View File

@ -0,0 +1,29 @@
<?xml version="1.0" ?>
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
</components>
</component-set>

View File

@ -0,0 +1,29 @@
<?xml version="1.0" ?>
<component-set>
<components>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements>
<requirement>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.registry.Registry</role>
<role-hint>configured</role-hint>
<implementation>org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry</implementation>
<configuration>
<properties>
<system/>
<xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties>
</configuration>
</component>
</components>
</component-set>

View File

@ -66,21 +66,16 @@
<artifactId>plexus-registry-api</artifactId> <artifactId>plexus-registry-api</artifactId>
</dependency> </dependency>
<!-- Test Dependencies --> <!-- Test Dependencies -->
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-core-consumers</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.apache.maven.archiva</groupId> <groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-database-consumers</artifactId> <artifactId>archiva-database-consumers</artifactId>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<dependency>
<groupId>hsqldb</groupId>
<artifactId>hsqldb</artifactId>
<scope>test</scope>
</dependency>
<dependency> <dependency>
<groupId>org.codehaus.plexus.registry</groupId> <groupId>org.codehaus.plexus.registry</groupId>
<artifactId>plexus-registry-commons</artifactId> <artifactId>plexus-registry-commons</artifactId>

View File

@ -104,9 +104,9 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryContentStatistics stats = repoScanner.scan( arepo, sinceWhen ); RepositoryContentStatistics stats = repoScanner.scan( arepo, sinceWhen );
stats = (RepositoryContentStatistics) dao.save( stats );
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) ); getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );
stats = (RepositoryContentStatistics) dao.save( stats );
} }
catch ( ArchivaDatabaseException e ) catch ( ArchivaDatabaseException e )
{ {

View File

@ -0,0 +1,100 @@
package org.apache.maven.archiva.scheduled;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseCleanupConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.List;
/**
* TestDatabaseCleanupConsumer
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
/**
 * TestDatabaseCleanupConsumer - a counting stub used by the scheduler tests to
 * verify that database cleanup scanning invokes its consumers.
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class TestDatabaseCleanupConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseCleanupConsumer
{
    // Invocation counters, one per lifecycle callback.
    private int countBegin = 0;

    private int countComplete = 0;

    private int countProcessed = 0;

    /**
     * Resets every invocation counter to zero; call between test runs.
     */
    public void resetCount()
    {
        countBegin = 0;
        countProcessed = 0;
        countComplete = 0;
    }

    public void beginScan()
    {
        countBegin++;
    }

    public void completeScan()
    {
        countComplete++;
    }

    /**
     * @return null, meaning this consumer does not restrict artifact types.
     */
    public List getIncludedTypes()
    {
        return null;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        countProcessed++;
    }

    public String getDescription()
    {
        // Was "Test Consumer for Database Unprocessed" - wrong for a cleanup consumer.
        return "Test Consumer for Database Cleanup";
    }

    public String getId()
    {
        // Must match the "test-db-cleanup" role-hint in the component descriptor
        // and the <cleanupConsumer>test-db-cleanup</cleanupConsumer> entry in
        // archiva-test.xml. The previous value "test-db-unprocessed" collided
        // with TestDatabaseUnprocessedConsumer's id.
        return "test-db-cleanup";
    }

    public boolean isPermanent()
    {
        // Test consumer; safe to deselect via configuration.
        return false;
    }

    public int getCountBegin()
    {
        return countBegin;
    }

    public int getCountComplete()
    {
        return countComplete;
    }

    public int getCountProcessed()
    {
        return countProcessed;
    }
}

View File

@ -0,0 +1,107 @@
package org.apache.maven.archiva.scheduled;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.ArrayList;
import java.util.List;
/**
* TestDatabaseUnprocessedConsumer
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
/**
 * TestDatabaseUnprocessedConsumer - a counting stub used by the scheduler
 * tests to verify that database scanning feeds unprocessed artifacts to its
 * consumers.
 *
 * @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
 * @version $Id$
 */
public class TestDatabaseUnprocessedConsumer
    extends AbstractMonitoredConsumer
    implements DatabaseUnprocessedArtifactConsumer
{
    // Invocation counters, one per lifecycle callback.
    private int scansBegun = 0;

    private int scansCompleted = 0;

    private int artifactsProcessed = 0;

    /**
     * Zeroes every invocation counter; call between test runs.
     */
    public void resetCount()
    {
        this.scansBegun = 0;
        this.artifactsProcessed = 0;
        this.scansCompleted = 0;
    }

    public void beginScan()
    {
        this.scansBegun++;
    }

    public void completeScan()
    {
        this.scansCompleted++;
    }

    /**
     * @return the artifact types this consumer handles: pom and jar.
     */
    public List getIncludedTypes()
    {
        List includedTypes = new ArrayList();
        includedTypes.add( "pom" );
        includedTypes.add( "jar" );
        return includedTypes;
    }

    public void processArchivaArtifact( ArchivaArtifact artifact )
        throws ConsumerException
    {
        getLogger().info( "Processing Artifact: " + artifact );
        this.artifactsProcessed++;
    }

    public String getDescription()
    {
        return "Test Consumer for Database Unprocessed";
    }

    public String getId()
    {
        return "test-db-unprocessed";
    }

    public boolean isPermanent()
    {
        // Test consumer; safe to deselect via configuration.
        return false;
    }

    public int getCountBegin()
    {
        return this.scansBegun;
    }

    public int getCountComplete()
    {
        return this.scansCompleted;
    }

    public int getCountProcessed()
    {
        return this.artifactsProcessed;
    }
}

View File

@ -57,40 +57,40 @@ public class ArchivaDatabaseUpdateTaskExecutorTest
private TaskExecutor taskExecutor; private TaskExecutor taskExecutor;
protected ArchivaDAO dao; protected ArchivaDAO dao;
protected void setUp() protected void setUp()
throws Exception throws Exception
{ {
super.setUp(); super.setUp();
DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" ); DefaultConfigurableJdoFactory jdoFactory = (DefaultConfigurableJdoFactory) lookup( JdoFactory.ROLE, "archiva" );
assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() ); assertEquals( DefaultConfigurableJdoFactory.class.getName(), jdoFactory.getClass().getName() );
jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" ); jdoFactory.setPersistenceManagerFactoryClass( "org.jpox.PersistenceManagerFactoryImpl" );
/* derby version /* derby version
File derbyDbDir = new File( "target/plexus-home/testdb" ); File derbyDbDir = new File( "target/plexus-home/testdb" );
if ( derbyDbDir.exists() ) if ( derbyDbDir.exists() )
{ {
FileUtils.deleteDirectory( derbyDbDir ); FileUtils.deleteDirectory( derbyDbDir );
} }
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) ); jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.apache.derby.jdbc.EmbeddedDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) ); jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:derby:" + derbyDbDir.getAbsolutePath() + ";create=true" ) );
*/ */
jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) ); jdoFactory.setDriverName( System.getProperty( "jdo.test.driver", "org.hsqldb.jdbcDriver" ) );
jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) ); jdoFactory.setUrl( System.getProperty( "jdo.test.url", "jdbc:hsqldb:mem:" + getName() ) );
jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) ); jdoFactory.setUserName( System.getProperty( "jdo.test.user", "sa" ) );
jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" ); jdoFactory.setPassword( System.getProperty( "jdo.test.pass", "" ) );
jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" ); jdoFactory.setProperty( "org.jpox.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" ); jdoFactory.setProperty( "org.jpox.poid.transactionIsolation", "READ_COMMITTED" );
jdoFactory.setProperty( "org.jpox.autoCreateSchema", "true" );
jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" ); jdoFactory.setProperty( "javax.jdo.option.RetainValues", "true" );
@ -113,8 +113,7 @@ public class ArchivaDatabaseUpdateTaskExecutorTest
System.setProperty( (String) entry.getKey(), (String) entry.getValue() ); System.setProperty( (String) entry.getKey(), (String) entry.getValue() );
} }
URL jdoFileUrls[] = new URL[] { getClass() URL jdoFileUrls[] = new URL[] { getClass().getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
.getResource( "/org/apache/maven/archiva/model/package.jdo" ) };
if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) ) if ( ( jdoFileUrls == null ) || ( jdoFileUrls[0] == null ) )
{ {
@ -140,7 +139,8 @@ public class ArchivaDatabaseUpdateTaskExecutorTest
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-database-update" ); taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-database-update" );
} }
public void testExecutor() throws Exception public void testExecutor()
throws Exception
{ {
RepositoryDAO repoDao = dao.getRepositoryDAO(); RepositoryDAO repoDao = dao.getRepositoryDAO();
@ -151,8 +151,7 @@ public class ArchivaDatabaseUpdateTaskExecutorTest
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" ); String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
// Create it // Create it
ArchivaRepository repo = ArchivaRepository repo = repoDao.createRepository( "testRepo", "Test Repository", repoUri );
repoDao.createRepository( "testRepo", "Test Repository", repoUri );
assertNotNull( repo ); assertNotNull( repo );
// Set some mandatory values // Set some mandatory values
@ -166,35 +165,44 @@ public class ArchivaDatabaseUpdateTaskExecutorTest
assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() ); assertEquals( "testRepo", JDOHelper.getObjectId( repoSaved.getModel() ).toString() );
ArtifactDAO adao = dao.getArtifactDAO(); ArtifactDAO adao = dao.getArtifactDAO();
ArchivaArtifact sqlArtifact = adao.createArtifact( "javax.sql", "jdbc", "2.0", "", "jar" ); ArchivaArtifact sqlArtifact = adao.createArtifact( "javax.sql", "jdbc", "2.0", "", "jar" );
sqlArtifact.getModel().setLastModified( new Date() ); sqlArtifact.getModel().setLastModified( new Date() );
sqlArtifact.getModel().setSize( 1234 ); sqlArtifact.getModel().setSize( 1234 );
sqlArtifact.getModel().setOrigin( "testcase" ); sqlArtifact.getModel().setOrigin( "testcase" );
sqlArtifact.getModel().setWhenProcessed( null ); sqlArtifact.getModel().setWhenProcessed( null );
adao.saveArtifact( sqlArtifact ); adao.saveArtifact( sqlArtifact );
ArchivaArtifact artifact = adao.getArtifact( "javax.sql", "jdbc", "2.0", null, "jar" ); ArchivaArtifact artifact = adao.getArtifact( "javax.sql", "jdbc", "2.0", null, "jar" );
assertNotNull( artifact ); assertNotNull( artifact );
// Test for artifact existance.
List artifactList = adao.queryArtifacts( null );
assertNotNull( "Artifact list should not be null.", artifactList );
assertEquals( "Artifact list size", 1, artifactList.size() );
// Test for unprocessed artifacts.
List unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) ); List unprocessedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( false ) );
assertNotNull( "Unprocessed Results should not be null.", unprocessedResultList );
assertNotNull( unprocessedResultList ); assertEquals( "Incorrect number of unprocessed artifacts detected.", 1, unprocessedResultList.size() );
assertEquals("Incorrect number of unprocessed artifacts detected.", 1, unprocessedResultList.size() );
// Execute the database task.
DatabaseTask dataTask = new DatabaseTask(); DatabaseTask dataTask = new DatabaseTask();
dataTask.setName( "testDataTask" ); dataTask.setName( "testDataTask" );
taskExecutor.executeTask( dataTask ); taskExecutor.executeTask( dataTask );
List processedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( true ) ); // Test for artifact existance.
artifactList = adao.queryArtifacts( null );
assertNotNull( "Artifact list should not be null.", artifactList );
assertEquals( "Artifact list size", 1, artifactList.size() );
assertNotNull( processedResultList ); // Test for processed artifacts.
assertEquals("Incorrect number of processed artifacts detected.", 1, processedResultList.size() ); List processedResultList = adao.queryArtifacts( new ArtifactsProcessedConstraint( true ) );
assertNotNull( "Processed Results should not be null.", processedResultList );
assertEquals( "Incorrect number of processed artifacts detected.", 1, processedResultList.size() );
} }
} }

View File

@ -147,7 +147,6 @@ public class ArchivaRepositoryScanningTaskExecutorTest
assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() ); assertTrue( "Default Test Repository should exist.", repoDir.exists() && repoDir.isDirectory() );
String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" ); String repoUri = "file://" + StringUtils.replace( repoDir.getAbsolutePath(), "\\", "/" );
// Create it // Create it
ArchivaRepository repo = ArchivaRepository repo =

View File

@ -0,0 +1,122 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<configuration>
<repositories>
<repository>
<id>testRepo</id>
<name>Archiva Test Repository</name>
<url>file://${basedir}/src/test/repositories/default-repository</url>
<layout>default</layout>
<releases>true</releases>
<snapshots>false</snapshots>
<indexed>true</indexed>
<refreshCronExpression>0 0 * * ?</refreshCronExpression>
</repository>
</repositories>
<proxyConnectors />
<networkProxies />
<repositoryScanning>
<fileTypes>
<fileType>
<id>artifacts</id>
<patterns>
<pattern>**/*.pom</pattern>
<pattern>**/*.jar</pattern>
<pattern>**/*.ear</pattern>
<pattern>**/*.war</pattern>
<pattern>**/*.car</pattern>
<pattern>**/*.sar</pattern>
<pattern>**/*.mar</pattern>
<pattern>**/*.rar</pattern>
<pattern>**/*.dtd</pattern>
<pattern>**/*.tld</pattern>
<pattern>**/*.tar.gz</pattern>
<pattern>**/*.tar.bz2</pattern>
<pattern>**/*.zip</pattern>
</patterns>
</fileType>
<fileType>
<id>indexable-content</id>
<patterns>
<pattern>**/*.txt</pattern>
<pattern>**/*.TXT</pattern>
<pattern>**/*.block</pattern>
<pattern>**/*.config</pattern>
<pattern>**/*.pom</pattern>
<pattern>**/*.xml</pattern>
<pattern>**/*.xsd</pattern>
<pattern>**/*.dtd</pattern>
<pattern>**/*.tld</pattern>
</patterns>
</fileType>
<fileType>
<id>auto-remove</id>
<patterns>
<pattern>**/*.bak</pattern>
<pattern>**/*~</pattern>
<pattern>**/*-</pattern>
</patterns>
</fileType>
<fileType>
<id>ignored</id>
<patterns>
<pattern>**/.htaccess</pattern>
<pattern>**/KEYS</pattern>
<pattern>**/*.rb</pattern>
<pattern>**/*.sh</pattern>
<pattern>**/.svn/**</pattern>
<pattern>**/.DAV/**</pattern>
</patterns>
</fileType>
</fileTypes>
<knownContentConsumers>
<knownContentConsumer>update-db-artifact</knownContentConsumer>
<knownContentConsumer>create-missing-checksums</knownContentConsumer>
<knownContentConsumer>update-db-repository-metadata</knownContentConsumer>
<knownContentConsumer>validate-checksum</knownContentConsumer>
<knownContentConsumer>validate-signature</knownContentConsumer>
<knownContentConsumer>index-content</knownContentConsumer>
<knownContentConsumer>auto-remove</knownContentConsumer>
<knownContentConsumer>auto-rename</knownContentConsumer>
</knownContentConsumers>
<invalidContentConsumers>
<invalidContentConsumer>update-db-bad-content</invalidContentConsumer>
</invalidContentConsumers>
</repositoryScanning>
<databaseScanning>
<cronExpression>0 0 * * ?</cronExpression>
<unprocessedConsumers>
<unprocessedConsumer>test-db-unprocessed</unprocessedConsumer>
<unprocessedConsumer>update-db-artifact</unprocessedConsumer>
</unprocessedConsumers>
<cleanupConsumers>
<cleanupConsumer>test-db-cleanup</cleanupConsumer>
<cleanupConsumer>not-present-remove-db-artifact</cleanupConsumer>
<cleanupConsumer>not-present-remove-db-project</cleanupConsumer>
<cleanupConsumer>not-present-remove-indexed</cleanupConsumer>
</cleanupConsumers>
</databaseScanning>
</configuration>

View File

@ -36,7 +36,6 @@
<component> <component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role> <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>test-configuration</role-hint>
<implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation> <implementation>org.apache.maven.archiva.configuration.DefaultArchivaConfiguration</implementation>
<requirements> <requirements>
<requirement> <requirement>
@ -53,12 +52,25 @@
<configuration> <configuration>
<properties> <properties>
<system/> <system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml" <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/> config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties> </properties>
</configuration> </configuration>
</component> </component>
<component>
<role>org.apache.maven.archiva.consumers.DatabaseCleanupConsumer</role>
<role-hint>test-db-cleanup</role-hint>
<implementation>org.apache.maven.archiva.scheduled.TestDatabaseCleanupConsumer</implementation>
</component>
<component>
<role>org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer</role>
<role-hint>test-db-unprocessed</role-hint>
<implementation>org.apache.maven.archiva.scheduled.TestDatabaseUnprocessedConsumer</implementation>
</component>
<component> <component>
<role>org.codehaus.plexus.jdo.JdoFactory</role> <role>org.codehaus.plexus.jdo.JdoFactory</role>
<role-hint>archiva</role-hint> <role-hint>archiva</role-hint>

View File

@ -61,7 +61,7 @@
<configuration> <configuration>
<properties> <properties>
<system/> <system/>
<xml fileName="${basedir}/src/test/conf/repository-manager.xml" <xml fileName="${basedir}/src/test/resources/archiva-test.xml"
config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/> config-name="org.apache.maven.archiva" config-at="org.apache.maven.archiva"/>
</properties> </properties>
</configuration> </configuration>

View File

@ -25,6 +25,7 @@ import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.database.ArchivaDAO; import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException; import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException; import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.database.browsing.RepositoryBrowsing;
import org.apache.maven.archiva.model.ArchivaProjectModel; import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.web.util.VersionMerger; import org.apache.maven.archiva.web.util.VersionMerger;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
@ -52,15 +53,10 @@ public class ShowArtifactAction
/* .\ Not Exposed \._____________________________________________ */ /* .\ Not Exposed \._____________________________________________ */
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="default"
*/ */
private ArchivaDAO dao; private RepositoryBrowsing repoBrowsing;
/**
* @plexus.requirement
*/
private ArchivaConfiguration archivaConfiguration;
/* .\ Input Parameters \.________________________________________ */ /* .\ Input Parameters \.________________________________________ */
private String groupId; private String groupId;
@ -193,9 +189,9 @@ public class ShowArtifactAction
} }
private ArchivaProjectModel readProject() private ArchivaProjectModel readProject()
throws ObjectNotFoundException, ArchivaDatabaseException throws ArchivaDatabaseException
{ {
return dao.getProjectModelDAO().getProjectModel( groupId, artifactId, version ); return repoBrowsing.selectVersion( groupId, artifactId, version );
} }
private boolean checkParameters() private boolean checkParameters()

View File

@ -20,10 +20,6 @@
<plexus> <plexus>
<load-on-start> <load-on-start>
<component>
<role>org.apache.maven.archiva.configuration.ConfigurationUpgrade</role>
<role-hint>default</role-hint>
</component>
<component> <component>
<role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role> <role>org.apache.maven.archiva.web.startup.ConfigurationSynchronization</role>
<role-hint>default</role-hint> <role-hint>default</role-hint>