MRM-708 - Migrate from Plexus Logging to SLF4J

* Replacing the AbstractLogEnabled base class with SLF4J Logger fields in the consumer classes.



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@629703 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Joakim Erdfelt 2008-02-21 07:28:33 +00:00
parent 23203d1a11
commit 45fca34ec2
11 changed files with 113 additions and 88 deletions

View File

@ -19,8 +19,6 @@ package org.apache.maven.archiva.consumers;
* under the License.
*/
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
@ -32,7 +30,6 @@ import java.util.Set;
* @version $Id$
*/
public abstract class AbstractMonitoredConsumer
extends AbstractLogEnabled
implements BaseConsumer
{
private Set<ConsumerMonitor> monitors = new HashSet<ConsumerMonitor>();

View File

@ -19,6 +19,11 @@ package org.apache.maven.archiva.consumers.core;
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
@ -41,11 +46,8 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* MetadataUpdaterConsumer will create and update the metadata present within the repository.
@ -60,6 +62,8 @@ public class MetadataUpdaterConsumer
extends AbstractMonitoredConsumer
implements KnownRepositoryContentConsumer, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( MetadataUpdaterConsumer.class );
/**
* @plexus.configuration default-value="metadata-updater"
*/
@ -183,12 +187,12 @@ public class MetadataUpdaterConsumer
if ( projectMetadata.exists() && ( projectMetadata.lastModified() >= this.scanStartTimestamp ) )
{
// This metadata is up to date. skip it.
getLogger().debug( "Skipping uptodate metadata: " + this.metadataTools.toPath( projectRef ) );
log.debug( "Skipping uptodate metadata: " + this.metadataTools.toPath( projectRef ) );
return;
}
metadataTools.updateMetadata( this.repository, projectRef );
getLogger().debug( "Updated metadata: " + this.metadataTools.toPath( projectRef ) );
log.debug( "Updated metadata: " + this.metadataTools.toPath( projectRef ) );
}
catch ( LayoutException e )
{
@ -229,12 +233,12 @@ public class MetadataUpdaterConsumer
if ( projectMetadata.exists() && ( projectMetadata.lastModified() >= this.scanStartTimestamp ) )
{
// This metadata is up to date. skip it.
getLogger().debug( "Skipping uptodate metadata: " + this.metadataTools.toPath( versionRef ) );
log.debug( "Skipping uptodate metadata: " + this.metadataTools.toPath( versionRef ) );
return;
}
metadataTools.updateMetadata( this.repository, versionRef );
getLogger().debug( "Updated metadata: " + this.metadataTools.toPath( versionRef ) );
log.debug( "Updated metadata: " + this.metadataTools.toPath( versionRef ) );
}
catch ( LayoutException e )
{

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.consumers.database;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
@ -40,10 +44,8 @@ import org.apache.maven.archiva.repository.project.ProjectModelException;
import org.apache.maven.archiva.repository.project.ProjectModelFilter;
import org.apache.maven.archiva.repository.project.ProjectModelReader;
import org.apache.maven.archiva.repository.project.filters.EffectiveProjectModelFilter;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ProjectModelToDatabaseConsumer
@ -58,6 +60,8 @@ public class ProjectModelToDatabaseConsumer
extends AbstractMonitoredConsumer
implements DatabaseUnprocessedArtifactConsumer
{
private Logger log = LoggerFactory.getLogger( ProjectModelToDatabaseConsumer.class );
/**
* @plexus.configuration default-value="update-db-project"
*/
@ -166,31 +170,31 @@ public class ProjectModelToDatabaseConsumer
if ( isValidModel( model, repo, artifact ) )
{
getLogger().debug( "Adding project model to database - " + Keys.toKey( model ) );
log.debug( "Adding project model to database - " + Keys.toKey( model ) );
dao.getProjectModelDAO().saveProjectModel( model );
}
else
{
getLogger().warn(
log.warn(
"Invalid or corrupt pom. Project model not added to database - " + Keys.toKey( model ) );
}
}
catch ( ProjectModelException e )
{
getLogger().warn( "Unable to read project model " + artifactFile + " : " + e.getMessage(), e );
log.warn( "Unable to read project model " + artifactFile + " : " + e.getMessage(), e );
addProblem( artifact, "Unable to read project model " + artifactFile + " : " + e.getMessage() );
}
catch ( ArchivaDatabaseException e )
{
getLogger().warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(),
log.warn( "Unable to save project model " + artifactFile + " to the database : " + e.getMessage(),
e );
}
catch ( Throwable t )
{
// Catch the other errors in the process to allow the rest of the process to complete.
getLogger().error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() +
log.error( "Unable to process model " + artifactFile + " due to : " + t.getClass().getName() +
" : " + t.getMessage(), t );
}
}
@ -256,7 +260,7 @@ public class ProjectModelToDatabaseConsumer
emsg.append( "]: The model artifactId [" ).append( model.getArtifactId() );
emsg.append( "] does not match the artifactId portion of the filename: " ).append( artifact.getArtifactId() );
getLogger().warn(emsg.toString() );
log.warn(emsg.toString() );
addProblem( artifact, emsg.toString() );
return false;
@ -272,7 +276,7 @@ public class ProjectModelToDatabaseConsumer
emsg.append( "]; The model version [" ).append( model.getVersion() );
emsg.append( "] does not match the version portion of the filename: " ).append( artifact.getVersion() );
getLogger().warn(emsg.toString() );
log.warn(emsg.toString() );
addProblem( artifact, emsg.toString() );
return false;
@ -311,7 +315,7 @@ public class ProjectModelToDatabaseConsumer
catch ( ArchivaDatabaseException e )
{
String emsg = "Unable to save problem with artifact location to DB: " + e.getMessage();
getLogger().warn( emsg, e );
log.warn( emsg, e );
throw new ConsumerException( emsg, e );
}
}

View File

@ -19,6 +19,11 @@ package org.apache.maven.archiva.consumers.lucene;
* under the License.
*/
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -38,11 +43,8 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* IndexArtifactConsumer
@ -57,6 +59,8 @@ public class IndexArtifactConsumer
extends AbstractMonitoredConsumer
implements DatabaseUnprocessedArtifactConsumer, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( IndexArtifactConsumer.class );
private static final String INDEX_ERROR = "indexing_error";
/**
@ -198,7 +202,7 @@ public class IndexArtifactConsumer
}
catch ( RepositoryException e )
{
getLogger().error( "Unable to load repository content object: " + e.getMessage(), e );
log.error( "Unable to load repository content object: " + e.getMessage(), e );
}
}
}

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.converter.legacy;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
@ -32,9 +35,8 @@ import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* LegacyConverterArtifactConsumer - convert artifacts as they are found
@ -51,6 +53,8 @@ public class LegacyConverterArtifactConsumer
extends AbstractMonitoredConsumer
implements KnownRepositoryContentConsumer
{
private Logger log = LoggerFactory.getLogger( LegacyConverterArtifactConsumer.class );
/**
* @plexus.requirement role-hint="legacy-to-default"
*/
@ -112,11 +116,11 @@ public class LegacyConverterArtifactConsumer
}
catch ( LayoutException e )
{
getLogger().warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
log.warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
}
catch ( ArtifactConversionException e )
{
getLogger().warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
log.warn( "Unable to convert artifact: " + path + " : " + e.getMessage(), e );
}
}

View File

@ -19,6 +19,12 @@ package org.apache.maven.archiva.repository.scanner;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.functors.IfClosure;
@ -31,13 +37,8 @@ import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* RepositoryContentConsumerUtil
@ -48,8 +49,9 @@ import java.util.Map;
* @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers"
*/
public class RepositoryContentConsumers
extends AbstractLogEnabled
{
private Logger log = LoggerFactory.getLogger( RepositoryContentConsumers.class );
/**
* @plexus.requirement
*/
@ -261,7 +263,7 @@ public class RepositoryContentConsumers
// Run the repository consumers
try
{
Closure triggerBeginScan = new TriggerBeginScanClosure( repository, getLogger() );
Closure triggerBeginScan = new TriggerBeginScanClosure( repository );
List<KnownRepositoryContentConsumer> selectedKnownConsumers = getSelectedKnownConsumers();
List<InvalidRepositoryContentConsumer> selectedInvalidConsumers = getSelectedInvalidConsumers();
@ -273,7 +275,7 @@ public class RepositoryContentConsumers
BaseFile baseFile = new BaseFile( repository.getLocation(), localFile );
ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate();
predicate.setBasefile( baseFile );
ConsumerProcessFileClosure closure = new ConsumerProcessFileClosure( getLogger() );
ConsumerProcessFileClosure closure = new ConsumerProcessFileClosure();
closure.setBasefile( baseFile );
predicate.setCaseSensitive( false );
Closure processIfWanted = IfClosure.getInstance( predicate, closure );

View File

@ -22,7 +22,8 @@ package org.apache.maven.archiva.repository.scanner.functors;
import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.common.utils.BaseFile;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.codehaus.plexus.logging.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ConsumerProcessFileClosure
@ -33,17 +34,10 @@ import org.codehaus.plexus.logging.Logger;
public class ConsumerProcessFileClosure
implements Closure
{
private Logger log = LoggerFactory.getLogger( ConsumerProcessFileClosure.class );
private BaseFile basefile;
private Logger logger;
public ConsumerProcessFileClosure( Logger logger )
{
// Lame. I know, but seeing as plexus doesn't like to cleanup after
// application loaded/lookup'd components, this is the best I can do.
this.logger = logger;
}
public void execute( Object input )
{
if ( input instanceof RepositoryContentConsumer )
@ -52,7 +46,7 @@ public class ConsumerProcessFileClosure
try
{
logger.debug( "Sending to consumer: " + consumer.getId() );
log.debug( "Sending to consumer: " + consumer.getId() );
consumer.processFile( basefile.getRelativePath() );
}
@ -61,7 +55,7 @@ public class ConsumerProcessFileClosure
/* Intentionally Catch all exceptions.
* So that the discoverer processing can continue.
*/
logger.error( "Consumer [" + consumer.getId() + "] had an error when processing file ["
log.error( "Consumer [" + consumer.getId() + "] had an error when processing file ["
+ basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
}
}
@ -79,11 +73,11 @@ public class ConsumerProcessFileClosure
public Logger getLogger()
{
return logger;
return log;
}
public void setLogger( Logger logger )
{
this.logger = logger;
this.log = logger;
}
}

View File

@ -19,13 +19,15 @@ package org.apache.maven.archiva.database.updater;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TestDatabaseUnprocessedConsumer
@ -37,6 +39,8 @@ public class TestDatabaseUnprocessedConsumer
extends AbstractMonitoredConsumer
implements DatabaseUnprocessedArtifactConsumer
{
private Logger log = LoggerFactory.getLogger( TestDatabaseUnprocessedConsumer.class );
private int countBegin = 0;
private int countComplete = 0;
@ -71,7 +75,7 @@ public class TestDatabaseUnprocessedConsumer
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
{
getLogger().info( "Processing Artifact: " + artifact );
log.info( "Processing Artifact: " + artifact );
countProcessed++;
}

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.reporting.artifact;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -39,9 +42,8 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Search the database of known SHA1 Checksums for potential duplicate artifacts.
@ -56,6 +58,8 @@ public class DuplicateArtifactsConsumer
extends AbstractMonitoredConsumer
implements ArchivaArtifactConsumer, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( DuplicateArtifactsConsumer.class );
/**
* @plexus.configuration default-value="duplicate-artifacts"
*/
@ -131,12 +135,12 @@ public class DuplicateArtifactsConsumer
}
catch ( ObjectNotFoundException e )
{
getLogger().debug( "No duplicates for artifact: " + artifact );
log.debug( "No duplicates for artifact: " + artifact );
return;
}
catch ( ArchivaDatabaseException e )
{
getLogger().warn( "Unable to query DB for potential duplicates with : " + artifact );
log.warn( "Unable to query DB for potential duplicates with : " + artifact );
return;
}
@ -145,7 +149,7 @@ public class DuplicateArtifactsConsumer
if ( results.size() <= 1 )
{
// No duplicates detected.
getLogger().debug( "Found no duplicate artifact results on: " + artifact );
log.debug( "Found no duplicate artifact results on: " + artifact );
return;
}
@ -169,13 +173,13 @@ public class DuplicateArtifactsConsumer
try
{
getLogger().debug( "Found duplicate artifact: " + problem );
log.debug( "Found duplicate artifact: " + problem );
dao.getRepositoryProblemDAO().saveRepositoryProblem( problem );
}
catch ( ArchivaDatabaseException e )
{
String emsg = "Unable to save problem with duplicate artifact to DB: " + e.getMessage();
getLogger().warn( emsg, e );
log.warn( emsg, e );
throw new ConsumerException( emsg, e );
}
}
@ -192,7 +196,7 @@ public class DuplicateArtifactsConsumer
}
catch ( RepositoryException e )
{
getLogger().warn( "Unable to calculate path for artifact: " + artifact );
log.warn( "Unable to calculate path for artifact: " + artifact );
return "";
}
}

View File

@ -19,6 +19,16 @@ package org.apache.maven.archiva.reporting.artifact;
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -40,16 +50,8 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationExce
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.util.SelectorUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Validate the location of the artifact based on the values indicated
@ -65,6 +67,8 @@ public class LocationArtifactsConsumer
extends AbstractMonitoredConsumer
implements ArchivaArtifactConsumer, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( LocationArtifactsConsumer.class );
/**
* @plexus.configuration default-value="duplicate-artifacts"
*/
@ -256,7 +260,7 @@ public class LocationArtifactsConsumer
catch ( ArchivaDatabaseException e )
{
String emsg = "Unable to save problem with artifact location to DB: " + e.getMessage();
getLogger().warn( emsg, e );
log.warn( emsg, e );
throw new ConsumerException( emsg, e );
}
}
@ -300,7 +304,7 @@ public class LocationArtifactsConsumer
}
catch ( RepositoryException e )
{
getLogger().warn( "Unable to calculate path for artifact: " + artifact );
log.warn( "Unable to calculate path for artifact: " + artifact );
return "";
}
}

View File

@ -19,13 +19,15 @@ package org.apache.maven.archiva.scheduled;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.DatabaseUnprocessedArtifactConsumer;
import org.apache.maven.archiva.model.ArchivaArtifact;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TestDatabaseUnprocessedConsumer
@ -37,6 +39,8 @@ public class TestDatabaseUnprocessedConsumer
extends AbstractMonitoredConsumer
implements DatabaseUnprocessedArtifactConsumer
{
private Logger log = LoggerFactory.getLogger( TestDatabaseUnprocessedConsumer.class );
private int countBegin = 0;
private int countComplete = 0;
@ -71,7 +75,7 @@ public class TestDatabaseUnprocessedConsumer
public void processArchivaArtifact( ArchivaArtifact artifact )
throws ConsumerException
{
getLogger().info( "Processing Artifact: " + artifact );
log.info( "Processing Artifact: " + artifact );
countProcessed++;
}