MRM-708 - Migrate from Plexus Logging to Slf4J

* Finishing up AbstractLogEnabled conversions to Slf4J.



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@629704 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Joakim Erdfelt 2008-02-21 07:31:33 +00:00
parent 45fca34ec2
commit 5f27f502ff
37 changed files with 354 additions and 322 deletions

View File

@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
*/ */
public class Checksums public class Checksums
{ {
private static final Logger LOG = LoggerFactory.getLogger(Checksums.class); private Logger log = LoggerFactory.getLogger(Checksums.class);
/** /**
* @plexus.requirement role-hint="sha1" * @plexus.requirement role-hint="sha1"
@ -66,7 +66,7 @@ public class Checksums
// Both files missing is a failure. // Both files missing is a failure.
if ( !sha1File.exists() && !md5File.exists() ) if ( !sha1File.exists() && !md5File.exists() )
{ {
LOG.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." ); log.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." );
checksPass = false; checksPass = false;
} }
@ -75,7 +75,7 @@ public class Checksums
// Bad sha1 checksum is a failure. // Bad sha1 checksum is a failure.
if ( !validateChecksum( sha1File, "sha1" ) ) if ( !validateChecksum( sha1File, "sha1" ) )
{ {
LOG.warn( "SHA1 is incorrect for " + file.getPath() ); log.warn( "SHA1 is incorrect for " + file.getPath() );
checksPass = false; checksPass = false;
} }
} }
@ -85,7 +85,7 @@ public class Checksums
// Bad md5 checksum is a failure. // Bad md5 checksum is a failure.
if ( !validateChecksum( md5File, "md5" ) ) if ( !validateChecksum( md5File, "md5" ) )
{ {
LOG.warn( "MD5 is incorrect for " + file.getPath() ); log.warn( "MD5 is incorrect for " + file.getPath() );
checksPass = false; checksPass = false;
} }
} }
@ -139,12 +139,12 @@ public class Checksums
} }
catch ( DigesterException e ) catch ( DigesterException e )
{ {
LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false; return false;
} }
catch ( IOException e ) catch ( IOException e )
{ {
LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e ); log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false; return false;
} }
} }
@ -169,28 +169,28 @@ public class Checksums
{ {
if ( checksumFile.isValidChecksum( hashFile ) ) if ( checksumFile.isValidChecksum( hashFile ) )
{ {
LOG.debug( "Valid checksum: " + hashFile.getPath() ); log.debug( "Valid checksum: " + hashFile.getPath() );
return true; return true;
} }
else else
{ {
LOG.debug( "Not valid checksum: " + hashFile.getPath() ); log.debug( "Not valid checksum: " + hashFile.getPath() );
return createChecksum( localFile, digester ); return createChecksum( localFile, digester );
} }
} }
catch ( FileNotFoundException e ) catch ( FileNotFoundException e )
{ {
LOG.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
catch ( DigesterException e ) catch ( DigesterException e )
{ {
LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
catch ( IOException e ) catch ( IOException e )
{ {
LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
} }
@ -213,27 +213,27 @@ public class Checksums
boolean validity = checksumFile.isValidChecksum( hashFile ); boolean validity = checksumFile.isValidChecksum( hashFile );
if ( validity ) if ( validity )
{ {
LOG.debug( "Valid checksum: " + hashFile.getPath() ); log.debug( "Valid checksum: " + hashFile.getPath() );
} }
else else
{ {
LOG.debug( "Not valid checksum: " + hashFile.getPath() ); log.debug( "Not valid checksum: " + hashFile.getPath() );
} }
return validity; return validity;
} }
catch ( FileNotFoundException e ) catch ( FileNotFoundException e )
{ {
LOG.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
catch ( DigesterException e ) catch ( DigesterException e )
{ {
LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
catch ( IOException e ) catch ( IOException e )
{ {
LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e ); log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false; return false;
} }
} }

View File

@ -36,12 +36,13 @@ import org.codehaus.plexus.evaluator.DefaultExpressionEvaluator;
import org.codehaus.plexus.evaluator.EvaluatorException; import org.codehaus.plexus.evaluator.EvaluatorException;
import org.codehaus.plexus.evaluator.ExpressionEvaluator; import org.codehaus.plexus.evaluator.ExpressionEvaluator;
import org.codehaus.plexus.evaluator.sources.SystemPropertyExpressionSource; import org.codehaus.plexus.evaluator.sources.SystemPropertyExpressionSource;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryException; import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
@ -83,9 +84,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration" * @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration"
*/ */
public class DefaultArchivaConfiguration public class DefaultArchivaConfiguration
extends AbstractLogEnabled
implements ArchivaConfiguration, RegistryListener, Initializable implements ArchivaConfiguration, RegistryListener, Initializable
{ {
private Logger log = LoggerFactory.getLogger(DefaultArchivaConfiguration.class);
/** /**
* Plexus registry to read the configuration from. * Plexus registry to read the configuration from.
* *
@ -287,7 +289,7 @@ public class DefaultArchivaConfiguration
else else
{ {
// Policy key doesn't exist. Don't add it to golden version. // Policy key doesn't exist. Don't add it to golden version.
getLogger().warn( "Policy [" + policyId + "] does not exist." ); log.warn( "Policy [" + policyId + "] does not exist." );
} }
} }
@ -323,13 +325,13 @@ public class DefaultArchivaConfiguration
{ {
if ( MapUtils.isEmpty( prePolicies ) ) if ( MapUtils.isEmpty( prePolicies ) )
{ {
getLogger().error( "No PreDownloadPolicies found!" ); log.error( "No PreDownloadPolicies found!" );
return null; return null;
} }
if ( MapUtils.isEmpty( postPolicies ) ) if ( MapUtils.isEmpty( postPolicies ) )
{ {
getLogger().error( "No PostDownloadPolicies found!" ); log.error( "No PostDownloadPolicies found!" );
return null; return null;
} }
@ -354,13 +356,13 @@ public class DefaultArchivaConfiguration
{ {
if ( MapUtils.isEmpty( prePolicies ) ) if ( MapUtils.isEmpty( prePolicies ) )
{ {
getLogger().error( "No PreDownloadPolicies found!" ); log.error( "No PreDownloadPolicies found!" );
return false; return false;
} }
if ( MapUtils.isEmpty( postPolicies ) ) if ( MapUtils.isEmpty( postPolicies ) )
{ {
getLogger().error( "No PostDownloadPolicies found!" ); log.error( "No PostDownloadPolicies found!" );
return false; return false;
} }
@ -505,7 +507,7 @@ public class DefaultArchivaConfiguration
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger().error( "Unable to create " + filetype + " file: " + e.getMessage(), e ); log.error( "Unable to create " + filetype + " file: " + e.getMessage(), e );
return false; return false;
} }
} }
@ -521,7 +523,7 @@ public class DefaultArchivaConfiguration
} }
catch ( Throwable t ) catch ( Throwable t )
{ {
getLogger().warn( "Unable to notify of saved configuration event.", t ); log.warn( "Unable to notify of saved configuration event.", t );
} }
} }
} }

View File

@ -19,17 +19,6 @@ package org.apache.maven.archiva.configuration;
* under the License. * under the License.
*/ */
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.configuration.CombinedConfiguration;
import org.apache.maven.archiva.configuration.functors.FiletypeSelectionPredicate;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
@ -37,6 +26,19 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.configuration.CombinedConfiguration;
import org.apache.maven.archiva.common.utils.Slf4JPlexusLogger;
import org.apache.maven.archiva.configuration.functors.FiletypeSelectionPredicate;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* FileTypes * FileTypes
* *
@ -46,9 +48,10 @@ import java.util.Map;
* @plexus.component role="org.apache.maven.archiva.configuration.FileTypes" * @plexus.component role="org.apache.maven.archiva.configuration.FileTypes"
*/ */
public class FileTypes public class FileTypes
extends AbstractLogEnabled
implements Initializable implements Initializable
{ {
private Logger log = LoggerFactory.getLogger(FileTypes.class);
public static final String ARTIFACTS = "artifacts"; public static final String ARTIFACTS = "artifacts";
public static final String AUTO_REMOVE = "auto-remove"; public static final String AUTO_REMOVE = "auto-remove";
@ -122,7 +125,7 @@ public class FileTypes
Field fld = commonsRegistry.getClass().getDeclaredField( "configuration" ); Field fld = commonsRegistry.getClass().getDeclaredField( "configuration" );
fld.setAccessible( true ); fld.setAccessible( true );
fld.set( commonsRegistry, new CombinedConfiguration() ); fld.set( commonsRegistry, new CombinedConfiguration() );
commonsRegistry.enableLogging( getLogger() ); commonsRegistry.enableLogging( new Slf4JPlexusLogger( FileTypes.class ) );
commonsRegistry.addConfigurationFromResource( "org/apache/maven/archiva/configuration/default-archiva.xml" ); commonsRegistry.addConfigurationFromResource( "org/apache/maven/archiva/configuration/default-archiva.xml" );
// Read configuration as it was intended. // Read configuration as it was intended.

View File

@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors;
import org.apache.commons.collections.Predicate; import org.apache.commons.collections.Predicate;
import org.apache.maven.archiva.indexer.RepositoryContentIndex; import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException; import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* Test the {@link RepositoryContentIndex} object for the existence of an index. * Test the {@link RepositoryContentIndex} object for the existence of an index.
@ -35,9 +36,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* role-hint="index-exists" * role-hint="index-exists"
*/ */
public class IndexExistsPredicate public class IndexExistsPredicate
extends AbstractLogEnabled
implements Predicate implements Predicate
{ {
private Logger log = LoggerFactory.getLogger( IndexExistsPredicate.class );
public boolean evaluate( Object object ) public boolean evaluate( Object object )
{ {
boolean satisfies = false; boolean satisfies = false;
@ -51,7 +53,7 @@ public class IndexExistsPredicate
} }
catch ( RepositoryIndexException e ) catch ( RepositoryIndexException e )
{ {
getLogger().info( log.info(
"Repository Content Index [" + index.getId() + "] for repository [" "Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in [" + index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." ); + index.getIndexDirectory().getAbsolutePath() + "]." );

View File

@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors;
import org.apache.commons.collections.Transformer; import org.apache.commons.collections.Transformer;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndex; import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndex;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* SearchableTransformer * SearchableTransformer
@ -33,9 +34,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* @plexus.component role="org.apache.commons.collections.Transformer" role-hint="searchable" * @plexus.component role="org.apache.commons.collections.Transformer" role-hint="searchable"
*/ */
public class SearchableTransformer public class SearchableTransformer
extends AbstractLogEnabled
implements Transformer implements Transformer
{ {
private Logger log = LoggerFactory.getLogger( SearchableTransformer.class );
public Object transform( Object input ) public Object transform( Object input )
{ {
if ( input instanceof LuceneRepositoryContentIndex ) if ( input instanceof LuceneRepositoryContentIndex )
@ -47,7 +49,7 @@ public class SearchableTransformer
} }
catch ( RepositoryIndexSearchException e ) catch ( RepositoryIndexSearchException e )
{ {
getLogger().warn("Unable to get searchable for index:" + e.getMessage(), e); log.warn("Unable to get searchable for index:" + e.getMessage(), e);
} }
} }

View File

@ -19,9 +19,10 @@ package org.apache.maven.archiva.indexer.search;
* under the License. * under the License.
*/ */
import org.apache.commons.collections.CollectionUtils; import java.io.IOException;
import org.apache.commons.collections.Predicate; import java.util.ArrayList;
import org.apache.commons.collections.Transformer; import java.util.List;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.queryParser.MultiFieldQueryParser; import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException; import org.apache.lucene.queryParser.ParseException;
@ -38,21 +39,17 @@ import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException; import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers; import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers; import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.functors.UserAllowedToSearchRepositoryPredicate;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers; import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys; import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter; import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery; import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord; import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import java.io.IOException; import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/** /**
* DefaultCrossRepositorySearch * DefaultCrossRepositorySearch
@ -62,9 +59,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default" * @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default"
*/ */
public class DefaultCrossRepositorySearch public class DefaultCrossRepositorySearch
extends AbstractLogEnabled
implements CrossRepositorySearch, RegistryListener, Initializable implements CrossRepositorySearch, RegistryListener, Initializable
{ {
private Logger log = LoggerFactory.getLogger( DefaultCrossRepositorySearch.class );
/** /**
* @plexus.requirement role-hint="lucene" * @plexus.requirement role-hint="lucene"
*/ */
@ -93,7 +91,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( ParseException e ) catch ( ParseException e )
{ {
getLogger().warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e ); log.warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e );
} }
// empty results. // empty results.
@ -115,7 +113,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( ParseException e ) catch ( ParseException e )
{ {
getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
} }
// empty results. // empty results.
@ -137,7 +135,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( ParseException e ) catch ( ParseException e )
{ {
getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e ); log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
} }
// empty results. // empty results.
@ -212,7 +210,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( java.text.ParseException e ) catch ( java.text.ParseException e )
{ {
getLogger().warn( "Unable to parse document into record: " + e.getMessage(), e ); log.warn( "Unable to parse document into record: " + e.getMessage(), e );
} }
} }
} }
@ -220,7 +218,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger().error( "Unable to setup multi-search: " + e.getMessage(), e ); log.error( "Unable to setup multi-search: " + e.getMessage(), e );
} }
finally finally
{ {
@ -233,7 +231,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( IOException ie ) catch ( IOException ie )
{ {
getLogger().error( "Unable to close index searcher: " + ie.getMessage(), ie ); log.error( "Unable to close index searcher: " + ie.getMessage(), ie );
} }
} }
@ -251,7 +249,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( RepositoryIndexSearchException e ) catch ( RepositoryIndexSearchException e )
{ {
getLogger().warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :" log.warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
+ e.getMessage(), e ); + e.getMessage(), e );
} }
} }
@ -329,7 +327,7 @@ public class DefaultCrossRepositorySearch
} }
catch ( RepositoryIndexException e ) catch ( RepositoryIndexException e )
{ {
getLogger().info( log.info(
"Repository Content Index [" + index.getId() + "] for repository [" "Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in [" + index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." ); + index.getIndexDirectory().getAbsolutePath() + "]." );

View File

@ -19,16 +19,17 @@ package org.apache.maven.archiva.policies;
* under the License. * under the License.
*/ */
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Calendar; import java.util.Calendar;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* AbstractUpdatePolicy * AbstractUpdatePolicy
* *
@ -36,9 +37,10 @@ import java.util.Properties;
* @version $Id$ * @version $Id$
*/ */
public abstract class AbstractUpdatePolicy public abstract class AbstractUpdatePolicy
extends AbstractLogEnabled
implements PreDownloadPolicy implements PreDownloadPolicy
{ {
private Logger log = LoggerFactory.getLogger( AbstractUpdatePolicy.class );
/** /**
* The ALWAYS policy setting means that the artifact is always updated from the remote repo. * The ALWAYS policy setting means that the artifact is always updated from the remote repo.
*/ */
@ -127,20 +129,20 @@ public abstract class AbstractUpdatePolicy
if ( ALWAYS.equals( policySetting ) ) if ( ALWAYS.equals( policySetting ) )
{ {
// Skip means ok to update. // Skip means ok to update.
getLogger().debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." ); log.debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." );
return; return;
} }
// Test for mismatches. // Test for mismatches.
if ( !isSnapshotVersion && isSnapshotPolicy() ) if ( !isSnapshotVersion && isSnapshotPolicy() )
{ {
getLogger().debug( "OK to update, snapshot policy does not apply for non-snapshot versions." ); log.debug( "OK to update, snapshot policy does not apply for non-snapshot versions." );
return; return;
} }
if ( isSnapshotVersion && !isSnapshotPolicy() ) if ( isSnapshotVersion && !isSnapshotPolicy() )
{ {
getLogger().debug( "OK to update, release policy does not apply for snapshot versions." ); log.debug( "OK to update, release policy does not apply for snapshot versions." );
return; return;
} }
@ -153,7 +155,7 @@ public abstract class AbstractUpdatePolicy
if ( !localFile.exists() ) if ( !localFile.exists() )
{ {
// No file means it's ok. // No file means it's ok.
getLogger().debug( "OK to update " + getUpdateMode() + ", local file does not exist." ); log.debug( "OK to update " + getUpdateMode() + ", local file does not exist." );
return; return;
} }

View File

@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies;
* under the License. * under the License.
*/ */
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* {@link PreDownloadPolicy} to check if the requested url has failed before. * {@link PreDownloadPolicy} to check if the requested url has failed before.
* *
@ -37,9 +38,10 @@ import java.util.Properties;
* role-hint="cache-failures" * role-hint="cache-failures"
*/ */
public class CachedFailuresPolicy public class CachedFailuresPolicy
extends AbstractLogEnabled
implements PreDownloadPolicy implements PreDownloadPolicy
{ {
private Logger log = LoggerFactory.getLogger( CachedFailuresPolicy.class );
/** /**
* The NO policy setting means that the existence of old failures is <strong>not</strong> checked. * The NO policy setting means that the existence of old failures is <strong>not</strong> checked.
* All resource requests are allowed thru to the remote repo. * All resource requests are allowed thru to the remote repo.
@ -78,7 +80,7 @@ public class CachedFailuresPolicy
if ( NO.equals( policySetting ) ) if ( NO.equals( policySetting ) )
{ {
// Skip. // Skip.
getLogger().debug( "OK to fetch, check-failures policy set to NO." ); log.debug( "OK to fetch, check-failures policy set to NO." );
return; return;
} }
@ -92,7 +94,7 @@ public class CachedFailuresPolicy
} }
} }
getLogger().debug( "OK to fetch, check-failures detected no issues." ); log.debug( "OK to fetch, check-failures detected no issues." );
} }
public String getDefaultOption() public String getDefaultOption()

View File

@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies;
* under the License. * under the License.
*/ */
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File; import java.io.File;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Properties; import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* ChecksumPolicy - a policy applied after the download to see if the file has been downloaded * ChecksumPolicy - a policy applied after the download to see if the file has been downloaded
* successfully and completely (or not). * successfully and completely (or not).
@ -39,9 +40,10 @@ import java.util.Properties;
* role-hint="checksum" * role-hint="checksum"
*/ */
public class ChecksumPolicy public class ChecksumPolicy
extends AbstractLogEnabled
implements PostDownloadPolicy implements PostDownloadPolicy
{ {
private Logger log = LoggerFactory.getLogger( ChecksumPolicy.class );
/** /**
* The IGNORE policy indicates that if the checksum policy is ignored, and * The IGNORE policy indicates that if the checksum policy is ignored, and
* the state of, contents of, or validity of the checksum files are not * the state of, contents of, or validity of the checksum files are not
@ -90,7 +92,7 @@ public class ChecksumPolicy
if ( IGNORE.equals( policySetting ) ) if ( IGNORE.equals( policySetting ) )
{ {
// Ignore. // Ignore.
getLogger().debug( "Checksum policy set to IGNORE." ); log.debug( "Checksum policy set to IGNORE." );
return; return;
} }
@ -131,7 +133,7 @@ public class ChecksumPolicy
{ {
if( checksums.update( localFile ) ) if( checksums.update( localFile ) )
{ {
getLogger().debug( "Checksum policy set to FIX, checksum files have been updated." ); log.debug( "Checksum policy set to FIX, checksum files have been updated." );
return; return;
} }
else else

View File

@ -19,9 +19,18 @@ package org.apache.maven.archiva.proxy;
* under the License. * under the License.
*/ */
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames; import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -56,24 +65,13 @@ import org.apache.maven.wagon.authentication.AuthenticationException;
import org.apache.maven.wagon.authentication.AuthenticationInfo; import org.apache.maven.wagon.authentication.AuthenticationInfo;
import org.apache.maven.wagon.proxy.ProxyInfo; import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.repository.Repository; import org.apache.maven.wagon.repository.Repository;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.util.SelectorUtils; import org.codehaus.plexus.util.SelectorUtils;
import org.slf4j.Logger;
import java.io.File; import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URLClassLoader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
/** /**
* DefaultRepositoryProxyConnectors * DefaultRepositoryProxyConnectors
@ -83,9 +81,10 @@ import java.util.Properties;
* @plexus.component role-hint="default" * @plexus.component role-hint="default"
*/ */
public class DefaultRepositoryProxyConnectors public class DefaultRepositoryProxyConnectors
extends AbstractLogEnabled
implements RepositoryProxyConnectors, RegistryListener, Initializable implements RepositoryProxyConnectors, RegistryListener, Initializable
{ {
private Logger log = LoggerFactory.getLogger( DefaultRepositoryProxyConnectors.class );
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
@ -160,29 +159,29 @@ public class DefaultRepositoryProxyConnectors
if ( fileExists( downloadedFile ) ) if ( fileExists( downloadedFile ) )
{ {
getLogger().debug( "Successfully transferred: " + downloadedFile.getAbsolutePath() ); log.debug( "Successfully transferred: " + downloadedFile.getAbsolutePath() );
return downloadedFile; return downloadedFile;
} }
} }
catch ( NotFoundException e ) catch ( NotFoundException e )
{ {
getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \"" log.debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( NotModifiedException e ) catch ( NotModifiedException e )
{ {
getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \"" log.debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( ProxyException e ) catch ( ProxyException e )
{ {
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for artifact " + Keys.toKey( artifact ) + ", continuing to next repository. Error message: " + "\" for artifact " + Keys.toKey( artifact ) + ", continuing to next repository. Error message: " +
e.getMessage() ); e.getMessage() );
getLogger().debug( "Full stack trace", e ); log.debug( "Full stack trace", e );
} }
} }
getLogger().debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." ); log.debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." );
return null; return null;
} }
@ -221,22 +220,22 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( NotFoundException e ) catch ( NotFoundException e )
{ {
getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata ) log.debug( "Versioned Metadata " + Keys.toKey( metadata )
+ " not found on remote repository \"" + " not found on remote repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( NotModifiedException e ) catch ( NotModifiedException e )
{ {
getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata ) log.debug( "Versioned Metadata " + Keys.toKey( metadata )
+ " not updated on remote repository \"" + " not updated on remote repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( ProxyException e ) catch ( ProxyException e )
{ {
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for versioned Metadata " + Keys.toKey( metadata ) + "\" for versioned Metadata " + Keys.toKey( metadata ) +
", continuing to next repository. Error message: " + e.getMessage() ); ", continuing to next repository. Error message: " + e.getMessage() );
getLogger().debug( "Full stack trace", e ); log.debug( "Full stack trace", e );
} }
} }
@ -253,24 +252,24 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( LayoutException e ) catch ( LayoutException e )
{ {
getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( RepositoryMetadataException e ) catch ( RepositoryMetadataException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( ContentNotFoundException e ) catch ( ContentNotFoundException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
@ -339,21 +338,21 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( NotFoundException e ) catch ( NotFoundException e )
{ {
getLogger().debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \"" log.debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( NotModifiedException e ) catch ( NotModifiedException e )
{ {
getLogger().debug( "Project Metadata " + Keys.toKey( metadata ) log.debug( "Project Metadata " + Keys.toKey( metadata )
+ " not updated on remote repository \"" + " not updated on remote repository \""
+ targetRepository.getRepository().getId() + "\"." ); + targetRepository.getRepository().getId() + "\"." );
} }
catch ( ProxyException e ) catch ( ProxyException e )
{ {
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() + log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for project metadata " + Keys.toKey( metadata ) + "\" for project metadata " + Keys.toKey( metadata ) +
", continuing to next repository. Error message: " + e.getMessage() ); ", continuing to next repository. Error message: " + e.getMessage() );
getLogger().debug( "Full stack trace", e ); log.debug( "Full stack trace", e );
} }
} }
@ -371,24 +370,24 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( LayoutException e ) catch ( LayoutException e )
{ {
getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() ); log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( RepositoryMetadataException e ) catch ( RepositoryMetadataException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( IOException e ) catch ( IOException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
catch ( ContentNotFoundException e ) catch ( ContentNotFoundException e )
{ {
getLogger() log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e ); .warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report? // TODO: add into repository report?
} }
@ -495,7 +494,7 @@ public class DefaultRepositoryProxyConnectors
// Path must belong to whitelist. // Path must belong to whitelist.
if ( !matchesPattern( remotePath, connector.getWhitelist() ) ) if ( !matchesPattern( remotePath, connector.getWhitelist() ) )
{ {
getLogger().debug( "Path [" + remotePath + log.debug( "Path [" + remotePath +
"] is not part of defined whitelist (skipping transfer from repository [" + "] is not part of defined whitelist (skipping transfer from repository [" +
remoteRepository.getRepository().getName() + "])." ); remoteRepository.getRepository().getName() + "])." );
return null; return null;
@ -505,7 +504,7 @@ public class DefaultRepositoryProxyConnectors
// Is target path part of blacklist? // Is target path part of blacklist?
if ( matchesPattern( remotePath, connector.getBlacklist() ) ) if ( matchesPattern( remotePath, connector.getBlacklist() ) )
{ {
getLogger().debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" + log.debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" +
remoteRepository.getRepository().getName() + "])." ); remoteRepository.getRepository().getName() + "])." );
return null; return null;
} }
@ -520,11 +519,11 @@ public class DefaultRepositoryProxyConnectors
String emsg = "Transfer not attempted on " + url + " : " + e.getMessage(); String emsg = "Transfer not attempted on " + url + " : " + e.getMessage();
if ( fileExists( localFile ) ) if ( fileExists( localFile ) )
{ {
getLogger().info( emsg + ": using already present local file." ); log.info( emsg + ": using already present local file." );
return localFile; return localFile;
} }
getLogger().info( emsg ); log.info( emsg );
return null; return null;
} }
@ -573,7 +572,7 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( ConnectionException e ) catch ( ConnectionException e )
{ {
getLogger().warn( "Unable to disconnect wagon.", e ); log.warn( "Unable to disconnect wagon.", e );
} }
} }
} }
@ -585,7 +584,7 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( PolicyViolationException e ) catch ( PolicyViolationException e )
{ {
getLogger().info( "Transfer invalidated from " + url + " : " + e.getMessage() ); log.info( "Transfer invalidated from " + url + " : " + e.getMessage() );
if ( fileExists( localFile ) ) if ( fileExists( localFile ) )
{ {
return localFile; return localFile;
@ -629,22 +628,22 @@ public class DefaultRepositoryProxyConnectors
{ {
File hashFile = new File( localFile.getAbsolutePath() + type ); File hashFile = new File( localFile.getAbsolutePath() + type );
transferSimpleFile( wagon, remoteRepository, remotePath + type, hashFile ); transferSimpleFile( wagon, remoteRepository, remotePath + type, hashFile );
getLogger().debug( "Checksum" + type + " Downloaded: " + hashFile ); log.debug( "Checksum" + type + " Downloaded: " + hashFile );
} }
catch ( NotFoundException e ) catch ( NotFoundException e )
{ {
getLogger().debug( "Transfer failed, checksum not found: " + url ); log.debug( "Transfer failed, checksum not found: " + url );
// Consume it, do not pass this on. // Consume it, do not pass this on.
} }
catch ( NotModifiedException e ) catch ( NotModifiedException e )
{ {
getLogger().debug( "Transfer skipped, checksum not modified: " + url ); log.debug( "Transfer skipped, checksum not modified: " + url );
// Consume it, do not pass this on. // Consume it, do not pass this on.
} }
catch ( ProxyException e ) catch ( ProxyException e )
{ {
urlFailureCache.cacheFailure( url + type ); urlFailureCache.cacheFailure( url + type );
getLogger().warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e ); log.warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e );
// Critical issue, pass it on. // Critical issue, pass it on.
throw e; throw e;
} }
@ -678,7 +677,7 @@ public class DefaultRepositoryProxyConnectors
if ( !localFile.exists() ) if ( !localFile.exists() )
{ {
getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() ); log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() );
wagon.get( remotePath, temp ); wagon.get( remotePath, temp );
success = true; success = true;
@ -688,11 +687,11 @@ public class DefaultRepositoryProxyConnectors
} }
// You wouldn't get here on failure, a WagonException would have been thrown. // You wouldn't get here on failure, a WagonException would have been thrown.
getLogger().debug( "Downloaded successfully." ); log.debug( "Downloaded successfully." );
} }
else else
{ {
getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName()
+ " if updated" ); + " if updated" );
success = wagon.getIfNewer( remotePath, temp, localFile.lastModified() ); success = wagon.getIfNewer( remotePath, temp, localFile.lastModified() );
if ( !success ) if ( !success )
@ -703,7 +702,7 @@ public class DefaultRepositoryProxyConnectors
if ( temp.exists() ) if ( temp.exists() )
{ {
getLogger().debug( "Downloaded successfully." ); log.debug( "Downloaded successfully." );
moveTempToTarget( temp, localFile ); moveTempToTarget( temp, localFile );
} }
} }
@ -748,14 +747,14 @@ public class DefaultRepositoryProxyConnectors
String defaultSetting = policy.getDefaultOption(); String defaultSetting = policy.getDefaultOption();
String setting = StringUtils.defaultString( (String) settings.get( key ), defaultSetting ); String setting = StringUtils.defaultString( (String) settings.get( key ), defaultSetting );
getLogger().debug( "Applying [" + key + "] policy with [" + setting + "]" ); log.debug( "Applying [" + key + "] policy with [" + setting + "]" );
try try
{ {
policy.applyPolicy( setting, request, localFile ); policy.applyPolicy( setting, request, localFile );
} }
catch ( PolicyConfigurationException e ) catch ( PolicyConfigurationException e )
{ {
getLogger().error( e.getMessage(), e ); log.error( e.getMessage(), e );
} }
} }
} }
@ -778,7 +777,7 @@ public class DefaultRepositoryProxyConnectors
if ( !temp.renameTo( target ) ) if ( !temp.renameTo( target ) )
{ {
getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." ); log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
try try
{ {
@ -821,7 +820,7 @@ public class DefaultRepositoryProxyConnectors
if ( StringUtils.isNotBlank( username ) && StringUtils.isNotBlank( password ) ) if ( StringUtils.isNotBlank( username ) && StringUtils.isNotBlank( password ) )
{ {
getLogger().debug( "Using username " + username + " to connect to remote repository " log.debug( "Using username " + username + " to connect to remote repository "
+ remoteRepository.getURL() ); + remoteRepository.getURL() );
authInfo = new AuthenticationInfo(); authInfo = new AuthenticationInfo();
authInfo.setUserName( username ); authInfo.setUserName( username );
@ -829,7 +828,7 @@ public class DefaultRepositoryProxyConnectors
} }
else else
{ {
getLogger().debug( "No authentication for remote repository needed" ); log.debug( "No authentication for remote repository needed" );
} }
//Convert seconds to milliseconds //Convert seconds to milliseconds
@ -851,14 +850,14 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( ConnectionException e ) catch ( ConnectionException e )
{ {
getLogger().warn( log.warn(
"Could not connect to " + remoteRepository.getRepository().getName() + ": " "Could not connect to " + remoteRepository.getRepository().getName() + ": "
+ e.getMessage() ); + e.getMessage() );
connected = false; connected = false;
} }
catch ( AuthenticationException e ) catch ( AuthenticationException e )
{ {
getLogger().warn( log.warn(
"Could not connect to " + remoteRepository.getRepository().getName() + ": " "Could not connect to " + remoteRepository.getRepository().getName() + ": "
+ e.getMessage() ); + e.getMessage() );
connected = false; connected = false;
@ -926,6 +925,16 @@ public class DefaultRepositoryProxyConnectors
/* do nothing */ /* do nothing */
} }
private void logProcess( String managedRepoId, String resource, String event )
{
}
private void logRejection( String managedRepoId, String remoteRepoId, String resource, String reason )
{
}
private void initConnectorsAndNetworkProxies() private void initConnectorsAndNetworkProxies()
{ {
synchronized ( this.proxyConnectorMap ) synchronized ( this.proxyConnectorMap )
@ -988,11 +997,11 @@ public class DefaultRepositoryProxyConnectors
} }
catch ( RepositoryNotFoundException e ) catch ( RepositoryNotFoundException e )
{ {
getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e ); log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
} }
catch ( RepositoryException e ) catch ( RepositoryException e )
{ {
getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e ); log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
} }
} }

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.proxy;
* under the License. * under the License.
*/ */
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.maven.wagon.ConnectionException; import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException; import org.apache.maven.wagon.ResourceDoesNotExistException;
@ -31,11 +35,8 @@ import org.apache.maven.wagon.events.SessionListener;
import org.apache.maven.wagon.events.TransferListener; import org.apache.maven.wagon.events.TransferListener;
import org.apache.maven.wagon.proxy.ProxyInfo; import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.repository.Repository; import org.apache.maven.wagon.repository.Repository;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.List;
/** /**
* A dummy wagon implementation * A dummy wagon implementation
@ -43,9 +44,10 @@ import java.util.List;
* @author <a href="mailto:brett@apache.org">Brett Porter</a> * @author <a href="mailto:brett@apache.org">Brett Porter</a>
*/ */
public class WagonDelegate public class WagonDelegate
extends AbstractLogEnabled
implements Wagon implements Wagon
{ {
private Logger log = LoggerFactory.getLogger( WagonDelegate.class );
private Wagon delegate; private Wagon delegate;
private String contentToGet; private String contentToGet;
@ -53,7 +55,7 @@ public class WagonDelegate
public void get( String resourceName, File destination ) public void get( String resourceName, File destination )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{ {
getLogger().debug( ".get(" + resourceName + ", " + destination + ")" ); log.debug( ".get(" + resourceName + ", " + destination + ")" );
delegate.get( resourceName, destination ); delegate.get( resourceName, destination );
create( destination ); create( destination );
} }
@ -61,7 +63,7 @@ public class WagonDelegate
public boolean getIfNewer( String resourceName, File destination, long timestamp ) public boolean getIfNewer( String resourceName, File destination, long timestamp )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{ {
getLogger().info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" ); log.info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" );
boolean result = delegate.getIfNewer( resourceName, destination, timestamp ); boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
createIfMissing( destination ); createIfMissing( destination );

View File

@ -19,7 +19,8 @@ package org.apache.maven.archiva.repository.audit;
* under the License. * under the License.
*/ */
import org.apache.log4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* AuditLog - Audit Log. * AuditLog - Audit Log.
@ -33,7 +34,7 @@ import org.apache.log4j.Logger;
public class AuditLog public class AuditLog
implements AuditListener implements AuditListener
{ {
public static final Logger logger = Logger.getLogger( "org.apache.archiva.AuditLog" ); public static final Logger logger = LoggerFactory.getLogger( "org.apache.archiva.AuditLog" );
private static final char DELIM = ' '; private static final char DELIM = ' ';

View File

@ -19,6 +19,8 @@ package org.apache.maven.archiva.repository.project;
* under the License. * under the License.
*/ */
import java.util.List;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames; import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -29,13 +31,12 @@ import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.project.resolvers.ManagedRepositoryProjectResolver; import org.apache.maven.archiva.repository.project.resolvers.ManagedRepositoryProjectResolver;
import org.apache.maven.archiva.repository.project.resolvers.NopProjectResolver; import org.apache.maven.archiva.repository.project.resolvers.NopProjectResolver;
import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolverStack; import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolverStack;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import java.util.List; import org.slf4j.LoggerFactory;
/** /**
* Factory for ProjectModelResolver objects * Factory for ProjectModelResolver objects
@ -45,9 +46,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.repository.project.ProjectModelResolverFactory" * @plexus.component role="org.apache.maven.archiva.repository.project.ProjectModelResolverFactory"
*/ */
public class ProjectModelResolverFactory public class ProjectModelResolverFactory
extends AbstractLogEnabled
implements RegistryListener, Initializable implements RegistryListener, Initializable
{ {
private Logger log = LoggerFactory.getLogger( ProjectModelResolverFactory.class );
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
@ -128,7 +130,7 @@ public class ProjectModelResolverFactory
} }
catch ( RepositoryException e ) catch ( RepositoryException e )
{ {
getLogger().warn( e.getMessage(), e ); log.warn( e.getMessage(), e );
} }
} }

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.repository.scanner;
* under the License. * under the License.
*/ */
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.FileTypes; import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
@ -26,12 +30,9 @@ import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer; import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer; import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.apache.maven.archiva.repository.RepositoryException; import org.apache.maven.archiva.repository.RepositoryException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.DirectoryWalker; import org.codehaus.plexus.util.DirectoryWalker;
import org.slf4j.Logger;
import java.io.File; import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
/** /**
* DefaultRepositoryScanner * DefaultRepositoryScanner
@ -41,9 +42,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryScanner" * @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryScanner"
*/ */
public class DefaultRepositoryScanner public class DefaultRepositoryScanner
extends AbstractLogEnabled
implements RepositoryScanner implements RepositoryScanner
{ {
private Logger log = LoggerFactory.getLogger( DefaultRepositoryScanner.class );
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
@ -112,7 +114,7 @@ public class DefaultRepositoryScanner
// Setup the Scan Instance // Setup the Scan Instance
RepositoryScannerInstance scannerInstance = new RepositoryScannerInstance( repository, knownContentConsumers, RepositoryScannerInstance scannerInstance = new RepositoryScannerInstance( repository, knownContentConsumers,
invalidContentConsumers, getLogger(), changesSince ); invalidContentConsumers, changesSince );
dirWalker.addDirectoryWalkListener( scannerInstance ); dirWalker.addDirectoryWalkListener( scannerInstance );

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.repository.scanner;
* under the License. * under the License.
*/ */
import java.io.File;
import java.util.List;
import org.apache.commons.collections.Closure; import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.functors.IfClosure; import org.apache.commons.collections.functors.IfClosure;
@ -30,12 +33,9 @@ import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure; import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate; import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure; import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.DirectoryWalkListener; import org.codehaus.plexus.util.DirectoryWalkListener;
import org.slf4j.Logger;
import java.io.File; import org.slf4j.LoggerFactory;
import java.util.List;
/** /**
* RepositoryScannerInstance * RepositoryScannerInstance
* *
@ -45,6 +45,8 @@ import java.util.List;
public class RepositoryScannerInstance public class RepositoryScannerInstance
implements DirectoryWalkListener implements DirectoryWalkListener
{ {
private Logger log = LoggerFactory.getLogger( RepositoryScannerInstance.class );
/** /**
* Consumers that process known content. * Consumers that process known content.
*/ */
@ -65,24 +67,21 @@ public class RepositoryScannerInstance
private ConsumerWantsFilePredicate consumerWantsFile; private ConsumerWantsFilePredicate consumerWantsFile;
private Logger logger;
public RepositoryScannerInstance( ManagedRepositoryConfiguration repository, public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
List<KnownRepositoryContentConsumer> knownConsumerList, List<KnownRepositoryContentConsumer> knownConsumerList,
List<InvalidRepositoryContentConsumer> invalidConsumerList, Logger logger ) List<InvalidRepositoryContentConsumer> invalidConsumerList )
{ {
this.repository = repository; this.repository = repository;
this.knownConsumers = knownConsumerList; this.knownConsumers = knownConsumerList;
this.invalidConsumers = invalidConsumerList; this.invalidConsumers = invalidConsumerList;
this.logger = logger;
this.consumerProcessFile = new ConsumerProcessFileClosure( logger ); this.consumerProcessFile = new ConsumerProcessFileClosure();
this.consumerWantsFile = new ConsumerWantsFilePredicate(); this.consumerWantsFile = new ConsumerWantsFilePredicate();
stats = new RepositoryScanStatistics(); stats = new RepositoryScanStatistics();
stats.setRepositoryId( repository.getId() ); stats.setRepositoryId( repository.getId() );
Closure triggerBeginScan = new TriggerBeginScanClosure( repository, logger ); Closure triggerBeginScan = new TriggerBeginScanClosure( repository );
CollectionUtils.forAllDo( knownConsumerList, triggerBeginScan ); CollectionUtils.forAllDo( knownConsumerList, triggerBeginScan );
CollectionUtils.forAllDo( invalidConsumerList, triggerBeginScan ); CollectionUtils.forAllDo( invalidConsumerList, triggerBeginScan );
@ -95,10 +94,9 @@ public class RepositoryScannerInstance
public RepositoryScannerInstance( ManagedRepositoryConfiguration repository, public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
List<KnownRepositoryContentConsumer> knownContentConsumers, List<KnownRepositoryContentConsumer> knownContentConsumers,
List<InvalidRepositoryContentConsumer> invalidContentConsumers, Logger logger, List<InvalidRepositoryContentConsumer> invalidContentConsumers, long changesSince )
long changesSince )
{ {
this( repository, knownContentConsumers, invalidContentConsumers, logger ); this( repository, knownContentConsumers, invalidContentConsumers );
consumerWantsFile.setChangesSince( changesSince ); consumerWantsFile.setChangesSince( changesSince );
@ -112,13 +110,13 @@ public class RepositoryScannerInstance
public void directoryWalkStarting( File basedir ) public void directoryWalkStarting( File basedir )
{ {
logger.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() ); log.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() );
stats.triggerStart(); stats.triggerStart();
} }
public void directoryWalkStep( int percentage, File file ) public void directoryWalkStep( int percentage, File file )
{ {
logger.debug( "Walk Step: " + percentage + ", " + file ); log.debug( "Walk Step: " + percentage + ", " + file );
stats.increaseFileCount(); stats.increaseFileCount();
@ -146,7 +144,7 @@ public class RepositoryScannerInstance
public void directoryWalkFinished() public void directoryWalkFinished()
{ {
logger.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() ); log.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() );
stats.triggerFinished(); stats.triggerFinished();
} }
@ -155,6 +153,6 @@ public class RepositoryScannerInstance
*/ */
public void debug( String message ) public void debug( String message )
{ {
logger.debug( "Repository Scanner: " + message ); log.debug( "Repository Scanner: " + message );
} }
} }

View File

@ -23,7 +23,8 @@ import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.ConsumerException; import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer; import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.codehaus.plexus.logging.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* TriggerBeginScanClosure * TriggerBeginScanClosure
@ -34,14 +35,13 @@ import org.codehaus.plexus.logging.Logger;
public class TriggerBeginScanClosure public class TriggerBeginScanClosure
implements Closure implements Closure
{ {
private Logger log = LoggerFactory.getLogger( TriggerBeginScanClosure.class );
private ManagedRepositoryConfiguration repository; private ManagedRepositoryConfiguration repository;
private Logger logger; public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository )
public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository, Logger logger )
{ {
this.repository = repository; this.repository = repository;
this.logger = logger;
} }
public void execute( Object input ) public void execute( Object input )
@ -56,7 +56,7 @@ public class TriggerBeginScanClosure
} }
catch ( ConsumerException e ) catch ( ConsumerException e )
{ {
logger.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e ); log.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e );
} }
} }
} }

View File

@ -21,7 +21,6 @@ package org.apache.maven.archiva.transaction;
import org.codehaus.plexus.digest.Digester; import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException; import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.IOUtil;
@ -43,7 +42,6 @@ import java.util.Map;
* @version $Id$ * @version $Id$
*/ */
public abstract class AbstractTransactionEvent public abstract class AbstractTransactionEvent
extends AbstractLogEnabled
implements TransactionEvent implements TransactionEvent
{ {
private Map backups = new HashMap(); private Map backups = new HashMap();

View File

@ -19,6 +19,12 @@ package org.apache.maven.archiva.database.browsing;
* under the License. * under the License.
*/ */
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.PredicateUtils; import org.apache.commons.collections.PredicateUtils;
import org.apache.commons.collections.functors.NotPredicate; import org.apache.commons.collections.functors.NotPredicate;
@ -35,13 +41,8 @@ import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel; import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.model.Keys; import org.apache.maven.archiva.model.Keys;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/** /**
* DefaultRepositoryBrowsing * DefaultRepositoryBrowsing
@ -51,9 +52,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing" * @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing"
*/ */
public class DefaultRepositoryBrowsing public class DefaultRepositoryBrowsing
extends AbstractLogEnabled
implements RepositoryBrowsing implements RepositoryBrowsing
{ {
private Logger log = LoggerFactory.getLogger( DefaultRepositoryBrowsing.class );
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
@ -201,7 +203,7 @@ public class DefaultRepositoryBrowsing
{ {
Map<String, String> snapshots = new HashMap<String, String>(); Map<String, String> snapshots = new HashMap<String, String>();
getLogger().info( "Processing snapshots." ); log.info( "Processing snapshots." );
for ( String version : versions ) for ( String version : versions )
{ {
@ -253,7 +255,7 @@ public class DefaultRepositoryBrowsing
{ {
if ( VersionUtil.getBaseVersion( uniqueVersion ).equals( version ) ) if ( VersionUtil.getBaseVersion( uniqueVersion ).equals( version ) )
{ {
getLogger().info( "Retrieving artifact with version " + uniqueVersion ); log.info( "Retrieving artifact with version " + uniqueVersion );
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, uniqueVersion, null, "pom" ); pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, uniqueVersion, null, "pom" );
return pomArtifact; return pomArtifact;

View File

@ -28,7 +28,6 @@ import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.database.constraints.AbstractSimpleConstraint; import org.apache.maven.archiva.database.constraints.AbstractSimpleConstraint;
import org.apache.maven.archiva.model.CompoundKey; import org.apache.maven.archiva.model.CompoundKey;
import org.codehaus.plexus.jdo.JdoFactory; import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
@ -60,7 +59,6 @@ import javax.jdo.spi.PersistenceCapable;
* @plexus.component role="org.apache.maven.archiva.database.jdo.JdoAccess" role-hint="archiva" * @plexus.component role="org.apache.maven.archiva.database.jdo.JdoAccess" role-hint="archiva"
*/ */
public class JdoAccess public class JdoAccess
extends AbstractLogEnabled
implements Initializable, InstanceLifecycleListener, StoreLifecycleListener implements Initializable, InstanceLifecycleListener, StoreLifecycleListener
{ {
/** /**

View File

@ -25,7 +25,6 @@ import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO; import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO; import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint; import org.apache.maven.archiva.database.SimpleConstraint;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.Serializable; import java.io.Serializable;
import java.util.List; import java.util.List;
@ -39,7 +38,6 @@ import java.util.List;
* @plexus.component role-hint="jdo" * @plexus.component role-hint="jdo"
*/ */
public class JdoArchivaDAO public class JdoArchivaDAO
extends AbstractLogEnabled
implements ArchivaDAO implements ArchivaDAO
{ {
/** /**

View File

@ -26,7 +26,6 @@ import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel; import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.apache.maven.archiva.model.jpox.ArchivaArtifactModelKey; import org.apache.maven.archiva.model.jpox.ArchivaArtifactModelKey;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Iterator; import java.util.Iterator;
@ -41,7 +40,6 @@ import java.util.List;
* @plexus.component role-hint="jdo" * @plexus.component role-hint="jdo"
*/ */
public class JdoArtifactDAO public class JdoArtifactDAO
extends AbstractLogEnabled
implements ArtifactDAO implements ArtifactDAO
{ {
/** /**

View File

@ -19,6 +19,8 @@ package org.apache.maven.archiva.database.project;
* under the License. * under the License.
*/ */
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO; import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException; import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException; import org.apache.maven.archiva.database.ObjectNotFoundException;
@ -28,9 +30,8 @@ import org.apache.maven.archiva.repository.project.ProjectModelException;
import org.apache.maven.archiva.repository.project.ProjectModelResolver; import org.apache.maven.archiva.repository.project.ProjectModelResolver;
import org.apache.maven.archiva.repository.project.resolvers.FilesystemBasedResolver; import org.apache.maven.archiva.repository.project.resolvers.FilesystemBasedResolver;
import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolutionListener; import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolutionListener;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/** /**
* Just in Time save of project models to the database, implemented as a listener * Just in Time save of project models to the database, implemented as a listener
@ -44,9 +45,10 @@ import java.util.List;
* role-hint="model-to-db" * role-hint="model-to-db"
*/ */
public class ProjectModelToDatabaseListener public class ProjectModelToDatabaseListener
extends AbstractLogEnabled
implements ProjectModelResolutionListener implements ProjectModelResolutionListener
{ {
private Logger log = LoggerFactory.getLogger( ProjectModelToDatabaseListener.class );
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
@ -147,7 +149,7 @@ public class ProjectModelToDatabaseListener
} }
catch ( ProjectModelException e ) catch ( ProjectModelException e )
{ {
getLogger().warn( e.getMessage(), e ); log.warn( e.getMessage(), e );
} }
} }
} }

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.database.updater;
* under the License. * under the License.
*/ */
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.IteratorUtils; import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Predicate; import org.apache.commons.collections.Predicate;
@ -29,11 +33,8 @@ import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint; import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.functors.UnprocessedArtifactPredicate; import org.apache.maven.archiva.model.functors.UnprocessedArtifactPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/** /**
* JdoDatabaseUpdater * JdoDatabaseUpdater
@ -45,9 +46,10 @@ import java.util.List;
* role-hint="jdo" * role-hint="jdo"
*/ */
public class JdoDatabaseUpdater public class JdoDatabaseUpdater
extends AbstractLogEnabled
implements DatabaseUpdater implements DatabaseUpdater
{ {
private Logger log = LoggerFactory.getLogger( JdoDatabaseUpdater.class );
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
@ -144,7 +146,7 @@ public class JdoDatabaseUpdater
if ( CollectionUtils.isEmpty( consumers ) ) if ( CollectionUtils.isEmpty( consumers ) )
{ {
getLogger().warn( "There are no selected consumers for unprocessed artifacts." ); log.warn( "There are no selected consumers for unprocessed artifacts." );
return; return;
} }
@ -162,7 +164,7 @@ public class JdoDatabaseUpdater
if ( CollectionUtils.isEmpty( consumers ) ) if ( CollectionUtils.isEmpty( consumers ) )
{ {
getLogger().warn( "There are no selected consumers for artifact cleanup." ); log.warn( "There are no selected consumers for artifact cleanup." );
return; return;
} }

View File

@ -23,7 +23,8 @@ import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer; import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
import org.apache.maven.archiva.consumers.ConsumerException; import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.model.ArchivaArtifact; import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.logging.AbstractLogEnabled; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* ProcessArchivaArtifactClosure * ProcessArchivaArtifactClosure
@ -36,9 +37,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* instantiation-strategy="per-lookup" * instantiation-strategy="per-lookup"
*/ */
class ProcessArchivaArtifactClosure class ProcessArchivaArtifactClosure
extends AbstractLogEnabled
implements Closure implements Closure
{ {
private Logger log = LoggerFactory.getLogger( ProcessArchivaArtifactClosure.class );
private ArchivaArtifact artifact; private ArchivaArtifact artifact;
public void execute( Object input ) public void execute( Object input )
@ -53,9 +55,7 @@ class ProcessArchivaArtifactClosure
} }
catch ( ConsumerException e ) catch ( ConsumerException e )
{ {
getLogger().warn( log.warn( "Unable to process artifact [" + artifact + "] with consumer [" + consumer.getId() + "]" );
"Unable to process artifact [" + artifact + "] with consumer [" + consumer.getId()
+ "]" );
} }
} }

View File

@ -19,8 +19,6 @@ package org.apache.maven.archiva.reporting;
* under the License. * under the License.
*/ */
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.Map; import java.util.Map;
/** /**
@ -33,7 +31,6 @@ import java.util.Map;
* role-hint="default" * role-hint="default"
*/ */
public class DefaultReportingManager public class DefaultReportingManager
extends AbstractLogEnabled
implements ReportingManager implements ReportingManager
{ {
/** /**

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.scheduled;
* under the License. * under the License.
*/ */
import java.text.ParseException;
import java.util.List;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException; import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
@ -27,7 +30,6 @@ import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask; import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask; import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate; import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
@ -43,9 +45,8 @@ import org.quartz.CronTrigger;
import org.quartz.JobDataMap; import org.quartz.JobDataMap;
import org.quartz.JobDetail; import org.quartz.JobDetail;
import org.quartz.SchedulerException; import org.quartz.SchedulerException;
import org.slf4j.Logger;
import java.text.ParseException; import org.slf4j.LoggerFactory;
import java.util.List;
/** /**
* Default implementation of a scheduling component for archiva. * Default implementation of a scheduling component for archiva.
@ -55,9 +56,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.scheduled.ArchivaTaskScheduler" role-hint="default" * @plexus.component role="org.apache.maven.archiva.scheduled.ArchivaTaskScheduler" role-hint="default"
*/ */
public class DefaultArchivaTaskScheduler public class DefaultArchivaTaskScheduler
extends AbstractLogEnabled
implements ArchivaTaskScheduler, Startable, RegistryListener implements ArchivaTaskScheduler, Startable, RegistryListener
{ {
private Logger log = LoggerFactory.getLogger( DefaultArchivaTaskScheduler.class );
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
@ -134,7 +136,7 @@ public class DefaultArchivaTaskScheduler
{ {
if ( repoConfig.getRefreshCronExpression() == null ) if ( repoConfig.getRefreshCronExpression() == null )
{ {
getLogger().warn( "Skipping job, no cron expression for " + repoConfig.getId() ); log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
return; return;
} }
@ -144,7 +146,7 @@ public class DefaultArchivaTaskScheduler
CronExpressionValidator cronValidator = new CronExpressionValidator(); CronExpressionValidator cronValidator = new CronExpressionValidator();
if ( !cronValidator.validate( cronString ) ) if ( !cronValidator.validate( cronString ) )
{ {
getLogger().warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() + log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
"] is invalid. Defaulting to hourly." ); "] is invalid. Defaulting to hourly." );
cronString = CRON_HOURLY; cronString = CRON_HOURLY;
} }
@ -168,7 +170,7 @@ public class DefaultArchivaTaskScheduler
} }
catch ( ParseException e ) catch ( ParseException e )
{ {
getLogger().error( log.error(
"ParseException in repository scanning cron expression, disabling repository scanning for '" + "ParseException in repository scanning cron expression, disabling repository scanning for '" +
repoConfig.getId() + "': " + e.getMessage() ); repoConfig.getId() + "': " + e.getMessage() );
} }
@ -190,7 +192,7 @@ public class DefaultArchivaTaskScheduler
CronExpressionValidator cronValidator = new CronExpressionValidator(); CronExpressionValidator cronValidator = new CronExpressionValidator();
if ( !cronValidator.validate( cronString ) ) if ( !cronValidator.validate( cronString ) )
{ {
getLogger().warn( log.warn(
"Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." ); "Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." );
cronString = CRON_HOURLY; cronString = CRON_HOURLY;
} }
@ -203,7 +205,7 @@ public class DefaultArchivaTaskScheduler
} }
catch ( ParseException e ) catch ( ParseException e )
{ {
getLogger().error( log.error(
"ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() ); "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
} }
@ -235,7 +237,7 @@ public class DefaultArchivaTaskScheduler
// cronExpression comes from the database scanning section // cronExpression comes from the database scanning section
if ( "cronExpression".equals( propertyName ) ) if ( "cronExpression".equals( propertyName ) )
{ {
getLogger().debug( "Restarting the database scheduled task after property change: " + propertyName ); log.debug( "Restarting the database scheduled task after property change: " + propertyName );
try try
{ {
@ -245,7 +247,7 @@ public class DefaultArchivaTaskScheduler
} }
catch ( SchedulerException e ) catch ( SchedulerException e )
{ {
getLogger().error( "Error restarting the database scanning job after property change." ); log.error( "Error restarting the database scanning job after property change." );
} }
} }
@ -269,7 +271,7 @@ public class DefaultArchivaTaskScheduler
} }
catch ( SchedulerException e ) catch ( SchedulerException e )
{ {
getLogger().error( "error restarting job: " + REPOSITORY_JOB + ":" + repoConfig.getId() ); log.error( "error restarting job: " + REPOSITORY_JOB + ":" + repoConfig.getId() );
} }
} }
} }

View File

@ -22,12 +22,13 @@ package org.apache.maven.archiva.scheduled.executors;
import org.apache.maven.archiva.database.ArchivaDatabaseException; import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.updater.DatabaseUpdater; import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask; import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task; import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor; import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* ArchivaDatabaseTaskExecutor * ArchivaDatabaseTaskExecutor
@ -40,9 +41,10 @@ import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
* role-hint="database-update" * role-hint="database-update"
*/ */
public class ArchivaDatabaseUpdateTaskExecutor public class ArchivaDatabaseUpdateTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor, Initializable implements TaskExecutor, Initializable
{ {
private Logger log = LoggerFactory.getLogger( ArchivaDatabaseUpdateTaskExecutor.class );
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
@ -51,7 +53,7 @@ public class ArchivaDatabaseUpdateTaskExecutor
public void initialize() public void initialize()
throws InitializationException throws InitializationException
{ {
getLogger().info( "Initialized " + this.getClass().getName() ); log.info( "Initialized " + this.getClass().getName() );
} }
public void executeTask( Task task ) public void executeTask( Task task )
@ -59,12 +61,12 @@ public class ArchivaDatabaseUpdateTaskExecutor
{ {
DatabaseTask dbtask = (DatabaseTask) task; DatabaseTask dbtask = (DatabaseTask) task;
getLogger().info( "Executing task from queue with job name: " + dbtask.getName() ); log.info( "Executing task from queue with job name: " + dbtask.getName() );
long time = System.currentTimeMillis(); long time = System.currentTimeMillis();
try try
{ {
getLogger().info( "Task: Updating unprocessed artifacts" ); log.info( "Task: Updating unprocessed artifacts" );
databaseUpdater.updateAllUnprocessed(); databaseUpdater.updateAllUnprocessed();
} }
catch ( ArchivaDatabaseException e ) catch ( ArchivaDatabaseException e )
@ -74,7 +76,7 @@ public class ArchivaDatabaseUpdateTaskExecutor
try try
{ {
getLogger().info( "Task: Updating processed artifacts" ); log.info( "Task: Updating processed artifacts" );
databaseUpdater.updateAllProcessed(); databaseUpdater.updateAllProcessed();
} }
catch ( ArchivaDatabaseException e ) catch ( ArchivaDatabaseException e )
@ -84,6 +86,6 @@ public class ArchivaDatabaseUpdateTaskExecutor
time = System.currentTimeMillis() - time; time = System.currentTimeMillis() - time;
getLogger().info( "Finished database task in " + time + "ms." ); log.info( "Finished database task in " + time + "ms." );
} }
} }

View File

@ -19,26 +19,26 @@ package org.apache.maven.archiva.scheduled.executors;
* under the License. * under the License.
*/ */
import java.util.List;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO; import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics; import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.model.RepositoryContentStatistics; import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException; import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics; import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner; import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask; import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task; import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException; import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor; import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.slf4j.Logger;
import java.util.List; import org.slf4j.LoggerFactory;
/** /**
* ArchivaRepositoryScanningTaskExecutor * ArchivaRepositoryScanningTaskExecutor
@ -51,9 +51,10 @@ import java.util.List;
* role-hint="repository-scanning" * role-hint="repository-scanning"
*/ */
public class ArchivaRepositoryScanningTaskExecutor public class ArchivaRepositoryScanningTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor, Initializable implements TaskExecutor, Initializable
{ {
private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );
/** /**
* @plexus.requirement role-hint="jdo" * @plexus.requirement role-hint="jdo"
*/ */
@ -74,7 +75,7 @@ public class ArchivaRepositoryScanningTaskExecutor
public void initialize() public void initialize()
throws InitializationException throws InitializationException
{ {
getLogger().info( "Initialized " + this.getClass().getName() ); log.info( "Initialized " + this.getClass().getName() );
} }
public void executeTask( Task task ) public void executeTask( Task task )
@ -87,7 +88,7 @@ public class ArchivaRepositoryScanningTaskExecutor
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id."); throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
} }
getLogger().info( "Executing task from queue with job name: " + repoTask.getName() ); log.info( "Executing task from queue with job name: " + repoTask.getName() );
try try
{ {
@ -105,7 +106,7 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen ); RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) ); log.info( "Finished repository task: " + stats.toDump( arepo ) );
// I hate jpox and modello // I hate jpox and modello
RepositoryContentStatistics dbstats = new RepositoryContentStatistics(); RepositoryContentStatistics dbstats = new RepositoryContentStatistics();

View File

@ -19,11 +19,12 @@ package org.apache.maven.archiva.security;
* under the License. * under the License.
*/ */
import org.codehaus.plexus.logging.AbstractLogEnabled; import java.util.List;
import org.codehaus.plexus.redback.rbac.RBACManager; import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck; import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import org.slf4j.Logger;
import java.util.List; import org.slf4j.LoggerFactory;
/** /**
* ArchivaStandardRolesCheck tests for the existance of expected / standard roles and permissions. * ArchivaStandardRolesCheck tests for the existance of expected / standard roles and permissions.
@ -35,9 +36,10 @@ import java.util.List;
* role-hint="required-roles" * role-hint="required-roles"
*/ */
public class ArchivaStandardRolesCheck public class ArchivaStandardRolesCheck
extends AbstractLogEnabled
implements EnvironmentCheck implements EnvironmentCheck
{ {
private Logger log = LoggerFactory.getLogger( ArchivaStandardRolesCheck.class );
/** /**
* @plexus.requirement role-hint="cached" * @plexus.requirement role-hint="cached"
*/ */
@ -61,7 +63,7 @@ public class ArchivaStandardRolesCheck
ArchivaRoleConstants.REGISTERED_USER_ROLE, ArchivaRoleConstants.REGISTERED_USER_ROLE,
ArchivaRoleConstants.USER_ADMINISTRATOR_ROLE }; ArchivaRoleConstants.USER_ADMINISTRATOR_ROLE };
getLogger().info( "Checking the existance of required roles." ); log.info( "Checking the existance of required roles." );
for ( String roleName : expectedRoles ) for ( String roleName : expectedRoles )
{ {
@ -85,7 +87,7 @@ public class ArchivaStandardRolesCheck
ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS, ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS,
"archiva-guest" }; "archiva-guest" };
getLogger().info( "Checking the existance of required operations." ); log.info( "Checking the existance of required operations." );
for ( String operation : expectedOperations ) for ( String operation : expectedOperations )
{ {

View File

@ -19,23 +19,24 @@ package org.apache.maven.archiva.security;
* under the License. * under the License.
*/ */
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException; import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames; import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.redback.rbac.RBACManager; import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.rbac.RbacManagerException; import org.codehaus.plexus.redback.rbac.RbacManagerException;
import org.codehaus.plexus.redback.rbac.UserAssignment; import org.codehaus.plexus.redback.rbac.UserAssignment;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck; import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import java.util.ArrayList; import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/** /**
* SecurityStartup * SecurityStartup
@ -46,9 +47,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.security.SecurityStartup" * @plexus.component role="org.apache.maven.archiva.security.SecurityStartup"
*/ */
public class SecurityStartup public class SecurityStartup
extends AbstractLogEnabled
implements RegistryListener implements RegistryListener
{ {
private Logger log = LoggerFactory.getLogger( SecurityStartup.class );
/** /**
* @plexus.requirement * @plexus.requirement
*/ */
@ -104,7 +106,7 @@ public class SecurityStartup
} }
catch ( RbacManagerException e ) catch ( RbacManagerException e )
{ {
getLogger().warn( log.warn(
"Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId ) "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId )
+ "] to " + principal + " user.", e ); + "] to " + principal + " user.", e );
} }
@ -129,7 +131,7 @@ public class SecurityStartup
} }
catch ( ArchivaSecurityException e ) catch ( ArchivaSecurityException e )
{ {
getLogger().warn( e.getMessage(), e ); log.warn( e.getMessage(), e );
} }
} }
} }
@ -145,7 +147,7 @@ public class SecurityStartup
} }
catch ( ArchivaSecurityException e ) catch ( ArchivaSecurityException e )
{ {
getLogger().warn( e.getMessage(), e ); log.warn( e.getMessage(), e );
} }
} }
} }
@ -164,7 +166,7 @@ public class SecurityStartup
for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() ) for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() )
{ {
EnvironmentCheck check = entry.getValue(); EnvironmentCheck check = entry.getValue();
getLogger().info( "Running Environment Check: " + entry.getKey() ); log.info( "Running Environment Check: " + entry.getKey() );
check.validateEnvironment( violations ); check.validateEnvironment( violations );
} }
@ -183,7 +185,7 @@ public class SecurityStartup
msg.append( "\n" ); msg.append( "\n" );
msg.append( "======================================================================" ); msg.append( "======================================================================" );
getLogger().fatalError( msg.toString() ); log.error( msg.toString() );
throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size() throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size()
+ "] violation(s) encountered, See log for details." ); + "] violation(s) encountered, See log for details." );

View File

@ -23,7 +23,6 @@ import com.opensymphony.webwork.ServletActionContext;
import com.opensymphony.xwork.ActionInvocation; import com.opensymphony.xwork.ActionInvocation;
import com.opensymphony.xwork.interceptor.Interceptor; import com.opensymphony.xwork.interceptor.Interceptor;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import javax.servlet.ServletContext; import javax.servlet.ServletContext;
@ -35,7 +34,6 @@ import javax.servlet.ServletContext;
* role-hint="configurationInterceptor" * role-hint="configurationInterceptor"
*/ */
public class ConfigurationInterceptor public class ConfigurationInterceptor
extends AbstractLogEnabled
implements Interceptor implements Interceptor
{ {
/** /**

View File

@ -21,9 +21,10 @@ package org.apache.maven.archiva.web.startup;
import org.apache.maven.archiva.common.ArchivaException; import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler; import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* ArchivaStartup - the startup of all archiva features in a deterministic order. * ArchivaStartup - the startup of all archiva features in a deterministic order.
@ -36,7 +37,6 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationExce
* role-hint="default" * role-hint="default"
*/ */
public class ArchivaStartup public class ArchivaStartup
extends AbstractLogEnabled
implements Initializable implements Initializable
{ {
/** /**
@ -57,7 +57,7 @@ public class ArchivaStartup
public void initialize() public void initialize()
throws InitializationException throws InitializationException
{ {
Banner.display( getLogger(), ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) ); Banner.display( ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
try try
{ {

View File

@ -19,14 +19,13 @@ package org.apache.maven.archiva.web.startup;
* under the License. * under the License.
*/ */
import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.slf4j.LoggerFactory;
/** /**
* Banner * Banner
* *
@ -38,7 +37,6 @@ import java.util.regex.Pattern;
* role-hint="default" * role-hint="default"
*/ */
public class Banner public class Banner
extends AbstractLogEnabled
{ {
public static String encode( String raw ) public static String encode( String raw )
{ {
@ -224,15 +222,15 @@ public class Banner
return injectVersion( decode( encodedBanner ), version ); return injectVersion( decode( encodedBanner ), version );
} }
public static void display( Logger logger, String version ) public static void display( String version )
{ {
String banner = getBanner( version ); String banner = getBanner( version );
logger.info( StringUtils.repeat( "_", 25 ) + "\n" + banner ); LoggerFactory.getLogger( Banner.class ).info( StringUtils.repeat( "_", 25 ) + "\n" + banner );
} }
public void initialize() public void initialize()
throws InitializationException throws InitializationException
{ {
Banner.display( getLogger(), ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) ); Banner.display( ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
} }
} }

View File

@ -23,7 +23,6 @@ import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.database.project.ProjectModelToDatabaseListener; import org.apache.maven.archiva.database.project.ProjectModelToDatabaseListener;
import org.apache.maven.archiva.repository.project.ProjectModelResolver; import org.apache.maven.archiva.repository.project.ProjectModelResolver;
import org.apache.maven.archiva.repository.project.ProjectModelResolverFactory; import org.apache.maven.archiva.repository.project.ProjectModelResolverFactory;
import org.codehaus.plexus.logging.AbstractLogEnabled;
/** /**
* ResolverFactoryInit - Initialize the Resolver Factory, and hook it up to * ResolverFactoryInit - Initialize the Resolver Factory, and hook it up to
@ -37,7 +36,6 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* role-hint="default" * role-hint="default"
*/ */
public class ResolverFactoryInit public class ResolverFactoryInit
extends AbstractLogEnabled
{ {
/** /**
* @plexus.requirement role-hint="database" * @plexus.requirement role-hint="database"

View File

@ -19,13 +19,17 @@ package org.apache.maven.archiva.web.startup;
* under the License. * under the License.
*/ */
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils; import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException; import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration; import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames; import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration; import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.security.ArchivaRoleConstants; import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.redback.rbac.RBACManager; import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.rbac.RbacManagerException; import org.codehaus.plexus.redback.rbac.RbacManagerException;
import org.codehaus.plexus.redback.rbac.UserAssignment; import org.codehaus.plexus.redback.rbac.UserAssignment;
@ -34,11 +38,8 @@ import org.codehaus.plexus.redback.role.RoleManagerException;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck; import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import org.codehaus.plexus.registry.Registry; import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener; import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import java.util.ArrayList; import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/** /**
* ConfigurationSynchronization * ConfigurationSynchronization
@ -50,9 +51,10 @@ import java.util.Map.Entry;
* role-hint="default" * role-hint="default"
*/ */
public class SecuritySynchronization public class SecuritySynchronization
extends AbstractLogEnabled
implements RegistryListener implements RegistryListener
{ {
private Logger log = LoggerFactory.getLogger( SecuritySynchronization.class );
/** /**
* @plexus.requirement role-hint="default" * @plexus.requirement role-hint="default"
*/ */
@ -112,7 +114,7 @@ public class SecuritySynchronization
catch ( RoleManagerException e ) catch ( RoleManagerException e )
{ {
// Log error. // Log error.
getLogger().error( "Unable to create roles for configured repositories: " + e.getMessage(), e ); log.error( "Unable to create roles for configured repositories: " + e.getMessage(), e );
} }
} }
} }
@ -145,7 +147,7 @@ public class SecuritySynchronization
for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() ) for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() )
{ {
EnvironmentCheck check = entry.getValue(); EnvironmentCheck check = entry.getValue();
getLogger().info( "Running Environment Check: " + entry.getKey() ); log.info( "Running Environment Check: " + entry.getKey() );
check.validateEnvironment( violations ); check.validateEnvironment( violations );
} }
@ -164,7 +166,7 @@ public class SecuritySynchronization
msg.append( "\n" ); msg.append( "\n" );
msg.append( "======================================================================" ); msg.append( "======================================================================" );
getLogger().fatalError( msg.toString() ); log.error( msg.toString() );
throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size() throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size()
+ "] violation(s) encountered, See log for details." ); + "] violation(s) encountered, See log for details." );
@ -198,7 +200,7 @@ public class SecuritySynchronization
} }
catch ( RbacManagerException e ) catch ( RbacManagerException e )
{ {
getLogger().warn( "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId ) log.warn( "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId )
+ "] to " + principal + " user.", e ); + "] to " + principal + " user.", e );
} }
} }

View File

@ -19,15 +19,17 @@ package org.apache.maven.archiva.web.tags;
* under the License. * under the License.
*/ */
import org.apache.commons.lang.StringEscapeUtils; import java.io.IOException;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import javax.servlet.jsp.JspException; import javax.servlet.jsp.JspException;
import javax.servlet.jsp.JspWriter; import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext; import javax.servlet.jsp.PageContext;
import java.io.IOException;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* CopyPasteSnippet * CopyPasteSnippet
@ -37,8 +39,9 @@ import java.io.IOException;
* @plexus.component role="org.apache.maven.archiva.web.tags.CopyPasteSnippet" * @plexus.component role="org.apache.maven.archiva.web.tags.CopyPasteSnippet"
*/ */
public class CopyPasteSnippet public class CopyPasteSnippet
extends AbstractLogEnabled
{ {
private Logger log = LoggerFactory.getLogger( CopyPasteSnippet.class );
public static final String PRE = "pre"; public static final String PRE = "pre";
public static final String TOGGLE = "toggle"; public static final String TOGGLE = "toggle";
@ -53,7 +56,7 @@ public class CopyPasteSnippet
if ( o == null ) if ( o == null )
{ {
buf.append( "Error generating snippet." ); buf.append( "Error generating snippet." );
getLogger().error( "Unable to generate snippet for null object." ); log.error( "Unable to generate snippet for null object." );
} }
else if ( o instanceof ManagedRepositoryConfiguration ) else if ( o instanceof ManagedRepositoryConfiguration )
{ {

View File

@ -19,6 +19,12 @@ package org.apache.maven.archiva.web.tags;
* under the License. * under the License.
*/ */
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import javax.servlet.jsp.PageContext;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.ArchivaException; import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.dependency.DependencyGraphFactory; import org.apache.maven.archiva.dependency.DependencyGraphFactory;
@ -34,15 +40,10 @@ import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.DependencyScope; import org.apache.maven.archiva.model.DependencyScope;
import org.apache.maven.archiva.model.Keys; import org.apache.maven.archiva.model.Keys;
import org.apache.maven.archiva.model.VersionedReference; import org.apache.maven.archiva.model.VersionedReference;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable; import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException; import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.slf4j.Logger;
import java.util.ArrayList; import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Stack;
import javax.servlet.jsp.PageContext;
/** /**
* DependencyTree * DependencyTree
@ -53,9 +54,10 @@ import javax.servlet.jsp.PageContext;
* @plexus.component role="org.apache.maven.archiva.web.tags.DependencyTree" * @plexus.component role="org.apache.maven.archiva.web.tags.DependencyTree"
*/ */
public class DependencyTree public class DependencyTree
extends AbstractLogEnabled
implements Initializable implements Initializable
{ {
private Logger log = LoggerFactory.getLogger( DependencyTree.class );
/** /**
* @plexus.requirement * @plexus.requirement
* role="org.apache.maven.archiva.dependency.graph.DependencyGraphBuilder" * role="org.apache.maven.archiva.dependency.graph.DependencyGraphBuilder"
@ -121,7 +123,7 @@ public class DependencyTree
{ {
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion ) String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: groupId is blank."; + "]: groupId is blank.";
getLogger().error( emsg ); log.error( emsg );
throw new ArchivaException( emsg ); throw new ArchivaException( emsg );
} }
@ -129,7 +131,7 @@ public class DependencyTree
{ {
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion ) String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: artifactId is blank."; + "]: artifactId is blank.";
getLogger().error( emsg ); log.error( emsg );
throw new ArchivaException( emsg ); throw new ArchivaException( emsg );
} }
@ -137,7 +139,7 @@ public class DependencyTree
{ {
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion ) String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: version is blank."; + "]: version is blank.";
getLogger().error( emsg ); log.error( emsg );
throw new ArchivaException( emsg ); throw new ArchivaException( emsg );
} }
@ -249,7 +251,7 @@ public class DependencyTree
catch ( GraphTaskException e ) catch ( GraphTaskException e )
{ {
String emsg = "Unable to generate graph for [" + Keys.toKey( projectRef ) + "] : " + e.getMessage(); String emsg = "Unable to generate graph for [" + Keys.toKey( projectRef ) + "] : " + e.getMessage();
getLogger().warn( emsg, e ); log.warn( emsg, e );
throw new ArchivaException( emsg, e ); throw new ArchivaException( emsg, e );
} }
} }