MRM-708 - Migrate from Plexus Logging to Slf4J

* Finishing up AbstractLogEnabled conversions to Slf4J.



git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/trunk@629704 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Joakim Erdfelt 2008-02-21 07:31:33 +00:00
parent 45fca34ec2
commit 5f27f502ff
37 changed files with 354 additions and 322 deletions

View File

@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
*/
public class Checksums
{
private static final Logger LOG = LoggerFactory.getLogger(Checksums.class);
private Logger log = LoggerFactory.getLogger(Checksums.class);
/**
* @plexus.requirement role-hint="sha1"
@ -66,7 +66,7 @@ public class Checksums
// Both files missing is a failure.
if ( !sha1File.exists() && !md5File.exists() )
{
LOG.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." );
log.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." );
checksPass = false;
}
@ -75,7 +75,7 @@ public class Checksums
// Bad sha1 checksum is a failure.
if ( !validateChecksum( sha1File, "sha1" ) )
{
LOG.warn( "SHA1 is incorrect for " + file.getPath() );
log.warn( "SHA1 is incorrect for " + file.getPath() );
checksPass = false;
}
}
@ -85,7 +85,7 @@ public class Checksums
// Bad md5 checksum is a failure.
if ( !validateChecksum( md5File, "md5" ) )
{
LOG.warn( "MD5 is incorrect for " + file.getPath() );
log.warn( "MD5 is incorrect for " + file.getPath() );
checksPass = false;
}
}
@ -139,12 +139,12 @@ public class Checksums
}
catch ( DigesterException e )
{
LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false;
}
catch ( IOException e )
{
LOG.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false;
}
}
@ -169,28 +169,28 @@ public class Checksums
{
if ( checksumFile.isValidChecksum( hashFile ) )
{
LOG.debug( "Valid checksum: " + hashFile.getPath() );
log.debug( "Valid checksum: " + hashFile.getPath() );
return true;
}
else
{
LOG.debug( "Not valid checksum: " + hashFile.getPath() );
log.debug( "Not valid checksum: " + hashFile.getPath() );
return createChecksum( localFile, digester );
}
}
catch ( FileNotFoundException e )
{
LOG.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
LOG.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
}
@ -213,27 +213,27 @@ public class Checksums
boolean validity = checksumFile.isValidChecksum( hashFile );
if ( validity )
{
LOG.debug( "Valid checksum: " + hashFile.getPath() );
log.debug( "Valid checksum: " + hashFile.getPath() );
}
else
{
LOG.debug( "Not valid checksum: " + hashFile.getPath() );
log.debug( "Not valid checksum: " + hashFile.getPath() );
}
return validity;
}
catch ( FileNotFoundException e )
{
LOG.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
LOG.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
}

View File

@ -36,12 +36,13 @@ import org.codehaus.plexus.evaluator.DefaultExpressionEvaluator;
import org.codehaus.plexus.evaluator.EvaluatorException;
import org.codehaus.plexus.evaluator.ExpressionEvaluator;
import org.codehaus.plexus.evaluator.sources.SystemPropertyExpressionSource;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.RegistryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
@ -83,9 +84,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration"
*/
public class DefaultArchivaConfiguration
extends AbstractLogEnabled
implements ArchivaConfiguration, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger(DefaultArchivaConfiguration.class);
/**
* Plexus registry to read the configuration from.
*
@ -287,7 +289,7 @@ public class DefaultArchivaConfiguration
else
{
// Policy key doesn't exist. Don't add it to golden version.
getLogger().warn( "Policy [" + policyId + "] does not exist." );
log.warn( "Policy [" + policyId + "] does not exist." );
}
}
@ -323,13 +325,13 @@ public class DefaultArchivaConfiguration
{
if ( MapUtils.isEmpty( prePolicies ) )
{
getLogger().error( "No PreDownloadPolicies found!" );
log.error( "No PreDownloadPolicies found!" );
return null;
}
if ( MapUtils.isEmpty( postPolicies ) )
{
getLogger().error( "No PostDownloadPolicies found!" );
log.error( "No PostDownloadPolicies found!" );
return null;
}
@ -354,13 +356,13 @@ public class DefaultArchivaConfiguration
{
if ( MapUtils.isEmpty( prePolicies ) )
{
getLogger().error( "No PreDownloadPolicies found!" );
log.error( "No PreDownloadPolicies found!" );
return false;
}
if ( MapUtils.isEmpty( postPolicies ) )
{
getLogger().error( "No PostDownloadPolicies found!" );
log.error( "No PostDownloadPolicies found!" );
return false;
}
@ -505,7 +507,7 @@ public class DefaultArchivaConfiguration
}
catch ( IOException e )
{
getLogger().error( "Unable to create " + filetype + " file: " + e.getMessage(), e );
log.error( "Unable to create " + filetype + " file: " + e.getMessage(), e );
return false;
}
}
@ -521,7 +523,7 @@ public class DefaultArchivaConfiguration
}
catch ( Throwable t )
{
getLogger().warn( "Unable to notify of saved configuration event.", t );
log.warn( "Unable to notify of saved configuration event.", t );
}
}
}

View File

@ -19,17 +19,6 @@ package org.apache.maven.archiva.configuration;
* under the License.
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.configuration.CombinedConfiguration;
import org.apache.maven.archiva.configuration.functors.FiletypeSelectionPredicate;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
@ -37,6 +26,19 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.configuration.CombinedConfiguration;
import org.apache.maven.archiva.common.utils.Slf4JPlexusLogger;
import org.apache.maven.archiva.configuration.functors.FiletypeSelectionPredicate;
import org.apache.maven.archiva.configuration.io.registry.ConfigurationRegistryReader;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.commons.CommonsConfigurationRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* FileTypes
*
@ -46,9 +48,10 @@ import java.util.Map;
* @plexus.component role="org.apache.maven.archiva.configuration.FileTypes"
*/
public class FileTypes
extends AbstractLogEnabled
implements Initializable
{
private Logger log = LoggerFactory.getLogger(FileTypes.class);
public static final String ARTIFACTS = "artifacts";
public static final String AUTO_REMOVE = "auto-remove";
@ -122,7 +125,7 @@ public class FileTypes
Field fld = commonsRegistry.getClass().getDeclaredField( "configuration" );
fld.setAccessible( true );
fld.set( commonsRegistry, new CombinedConfiguration() );
commonsRegistry.enableLogging( getLogger() );
commonsRegistry.enableLogging( new Slf4JPlexusLogger( FileTypes.class ) );
commonsRegistry.addConfigurationFromResource( "org/apache/maven/archiva/configuration/default-archiva.xml" );
// Read configuration as it was intended.

View File

@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors;
import org.apache.commons.collections.Predicate;
import org.apache.maven.archiva.indexer.RepositoryContentIndex;
import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test the {@link RepositoryContentIndex} object for the existence of an index.
@ -35,9 +36,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* role-hint="index-exists"
*/
public class IndexExistsPredicate
extends AbstractLogEnabled
implements Predicate
{
private Logger log = LoggerFactory.getLogger( IndexExistsPredicate.class );
public boolean evaluate( Object object )
{
boolean satisfies = false;
@ -51,7 +53,7 @@ public class IndexExistsPredicate
}
catch ( RepositoryIndexException e )
{
getLogger().info(
log.info(
"Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." );

View File

@ -22,7 +22,8 @@ package org.apache.maven.archiva.indexer.functors;
import org.apache.commons.collections.Transformer;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentIndex;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* SearchableTransformer
@ -33,9 +34,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* @plexus.component role="org.apache.commons.collections.Transformer" role-hint="searchable"
*/
public class SearchableTransformer
extends AbstractLogEnabled
implements Transformer
{
private Logger log = LoggerFactory.getLogger( SearchableTransformer.class );
public Object transform( Object input )
{
if ( input instanceof LuceneRepositoryContentIndex )
@ -47,7 +49,7 @@ public class SearchableTransformer
}
catch ( RepositoryIndexSearchException e )
{
getLogger().warn("Unable to get searchable for index:" + e.getMessage(), e);
log.warn("Unable to get searchable for index:" + e.getMessage(), e);
}
}

View File

@ -19,9 +19,10 @@ package org.apache.maven.archiva.indexer.search;
* under the License.
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.collections.Transformer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.document.Document;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
@ -38,21 +39,17 @@ import org.apache.maven.archiva.indexer.RepositoryIndexException;
import org.apache.maven.archiva.indexer.RepositoryIndexSearchException;
import org.apache.maven.archiva.indexer.bytecode.BytecodeHandlers;
import org.apache.maven.archiva.indexer.filecontent.FileContentHandlers;
import org.apache.maven.archiva.indexer.functors.UserAllowedToSearchRepositoryPredicate;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesHandlers;
import org.apache.maven.archiva.indexer.hashcodes.HashcodesKeys;
import org.apache.maven.archiva.indexer.lucene.LuceneEntryConverter;
import org.apache.maven.archiva.indexer.lucene.LuceneQuery;
import org.apache.maven.archiva.indexer.lucene.LuceneRepositoryContentRecord;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DefaultCrossRepositorySearch
@ -62,9 +59,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.indexer.search.CrossRepositorySearch" role-hint="default"
*/
public class DefaultCrossRepositorySearch
extends AbstractLogEnabled
implements CrossRepositorySearch, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( DefaultCrossRepositorySearch.class );
/**
* @plexus.requirement role-hint="lucene"
*/
@ -93,7 +91,7 @@ public class DefaultCrossRepositorySearch
}
catch ( ParseException e )
{
getLogger().warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e );
log.warn( "Unable to parse query [" + checksum + "]: " + e.getMessage(), e );
}
// empty results.
@ -115,7 +113,7 @@ public class DefaultCrossRepositorySearch
}
catch ( ParseException e )
{
getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
}
// empty results.
@ -137,7 +135,7 @@ public class DefaultCrossRepositorySearch
}
catch ( ParseException e )
{
getLogger().warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
log.warn( "Unable to parse query [" + term + "]: " + e.getMessage(), e );
}
// empty results.
@ -212,7 +210,7 @@ public class DefaultCrossRepositorySearch
}
catch ( java.text.ParseException e )
{
getLogger().warn( "Unable to parse document into record: " + e.getMessage(), e );
log.warn( "Unable to parse document into record: " + e.getMessage(), e );
}
}
}
@ -220,7 +218,7 @@ public class DefaultCrossRepositorySearch
}
catch ( IOException e )
{
getLogger().error( "Unable to setup multi-search: " + e.getMessage(), e );
log.error( "Unable to setup multi-search: " + e.getMessage(), e );
}
finally
{
@ -233,7 +231,7 @@ public class DefaultCrossRepositorySearch
}
catch ( IOException ie )
{
getLogger().error( "Unable to close index searcher: " + ie.getMessage(), ie );
log.error( "Unable to close index searcher: " + ie.getMessage(), ie );
}
}
@ -251,7 +249,7 @@ public class DefaultCrossRepositorySearch
}
catch ( RepositoryIndexSearchException e )
{
getLogger().warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
log.warn( "Unable to get searchable for index [" + contentIndex.getId() + "] :"
+ e.getMessage(), e );
}
}
@ -329,7 +327,7 @@ public class DefaultCrossRepositorySearch
}
catch ( RepositoryIndexException e )
{
getLogger().info(
log.info(
"Repository Content Index [" + index.getId() + "] for repository ["
+ index.getRepository().getId() + "] does not exist yet in ["
+ index.getIndexDirectory().getAbsolutePath() + "]." );

View File

@ -19,16 +19,17 @@ package org.apache.maven.archiva.policies;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AbstractUpdatePolicy
*
@ -36,9 +37,10 @@ import java.util.Properties;
* @version $Id$
*/
public abstract class AbstractUpdatePolicy
extends AbstractLogEnabled
implements PreDownloadPolicy
{
private Logger log = LoggerFactory.getLogger( AbstractUpdatePolicy.class );
/**
* The ALWAYS policy setting means that the artifact is always updated from the remote repo.
*/
@ -127,20 +129,20 @@ public abstract class AbstractUpdatePolicy
if ( ALWAYS.equals( policySetting ) )
{
// Skip means ok to update.
getLogger().debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." );
log.debug( "OK to update, " + getUpdateMode() + " policy set to ALWAYS." );
return;
}
// Test for mismatches.
if ( !isSnapshotVersion && isSnapshotPolicy() )
{
getLogger().debug( "OK to update, snapshot policy does not apply for non-snapshot versions." );
log.debug( "OK to update, snapshot policy does not apply for non-snapshot versions." );
return;
}
if ( isSnapshotVersion && !isSnapshotPolicy() )
{
getLogger().debug( "OK to update, release policy does not apply for snapshot versions." );
log.debug( "OK to update, release policy does not apply for snapshot versions." );
return;
}
@ -153,7 +155,7 @@ public abstract class AbstractUpdatePolicy
if ( !localFile.exists() )
{
// No file means it's ok.
getLogger().debug( "OK to update " + getUpdateMode() + ", local file does not exist." );
log.debug( "OK to update " + getUpdateMode() + ", local file does not exist." );
return;
}

View File

@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@link PreDownloadPolicy} to check if the requested url has failed before.
*
@ -37,9 +38,10 @@ import java.util.Properties;
* role-hint="cache-failures"
*/
public class CachedFailuresPolicy
extends AbstractLogEnabled
implements PreDownloadPolicy
{
private Logger log = LoggerFactory.getLogger( CachedFailuresPolicy.class );
/**
* The NO policy setting means that the existence of old failures is <strong>not</strong> checked.
* All resource requests are allowed thru to the remote repo.
@ -78,7 +80,7 @@ public class CachedFailuresPolicy
if ( NO.equals( policySetting ) )
{
// Skip.
getLogger().debug( "OK to fetch, check-failures policy set to NO." );
log.debug( "OK to fetch, check-failures policy set to NO." );
return;
}
@ -92,7 +94,7 @@ public class CachedFailuresPolicy
}
}
getLogger().debug( "OK to fetch, check-failures detected no issues." );
log.debug( "OK to fetch, check-failures detected no issues." );
}
public String getDefaultOption()

View File

@ -19,15 +19,16 @@ package org.apache.maven.archiva.policies;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ChecksumPolicy - a policy applied after the download to see if the file has been downloaded
* successfully and completely (or not).
@ -39,9 +40,10 @@ import java.util.Properties;
* role-hint="checksum"
*/
public class ChecksumPolicy
extends AbstractLogEnabled
implements PostDownloadPolicy
{
private Logger log = LoggerFactory.getLogger( ChecksumPolicy.class );
/**
* The IGNORE policy indicates that if the checksum policy is ignored, and
* the state of, contents of, or validity of the checksum files are not
@ -90,7 +92,7 @@ public class ChecksumPolicy
if ( IGNORE.equals( policySetting ) )
{
// Ignore.
getLogger().debug( "Checksum policy set to IGNORE." );
log.debug( "Checksum policy set to IGNORE." );
return;
}
@ -131,7 +133,7 @@ public class ChecksumPolicy
{
if( checksums.update( localFile ) )
{
getLogger().debug( "Checksum policy set to FIX, checksum files have been updated." );
log.debug( "Checksum policy set to FIX, checksum files have been updated." );
return;
}
else

View File

@ -19,9 +19,18 @@ package org.apache.maven.archiva.proxy;
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -56,24 +65,13 @@ import org.apache.maven.wagon.authentication.AuthenticationException;
import org.apache.maven.wagon.authentication.AuthenticationInfo;
import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.repository.Repository;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import org.codehaus.plexus.util.SelectorUtils;
import java.io.File;
import java.io.IOException;
import java.net.URLClassLoader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DefaultRepositoryProxyConnectors
@ -83,9 +81,10 @@ import java.util.Properties;
* @plexus.component role-hint="default"
*/
public class DefaultRepositoryProxyConnectors
extends AbstractLogEnabled
implements RepositoryProxyConnectors, RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( DefaultRepositoryProxyConnectors.class );
/**
* @plexus.requirement
*/
@ -160,29 +159,29 @@ public class DefaultRepositoryProxyConnectors
if ( fileExists( downloadedFile ) )
{
getLogger().debug( "Successfully transferred: " + downloadedFile.getAbsolutePath() );
log.debug( "Successfully transferred: " + downloadedFile.getAbsolutePath() );
return downloadedFile;
}
}
catch ( NotFoundException e )
{
getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \""
log.debug( "Artifact " + Keys.toKey( artifact ) + " not found on repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( NotModifiedException e )
{
getLogger().debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \""
log.debug( "Artifact " + Keys.toKey( artifact ) + " not updated on repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( ProxyException e )
{
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for artifact " + Keys.toKey( artifact ) + ", continuing to next repository. Error message: " +
e.getMessage() );
getLogger().debug( "Full stack trace", e );
log.debug( "Full stack trace", e );
}
}
getLogger().debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." );
log.debug( "Exhausted all target repositories, artifact " + Keys.toKey( artifact ) + " not found." );
return null;
}
@ -221,22 +220,22 @@ public class DefaultRepositoryProxyConnectors
}
catch ( NotFoundException e )
{
getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata )
log.debug( "Versioned Metadata " + Keys.toKey( metadata )
+ " not found on remote repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( NotModifiedException e )
{
getLogger().debug( "Versioned Metadata " + Keys.toKey( metadata )
log.debug( "Versioned Metadata " + Keys.toKey( metadata )
+ " not updated on remote repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( ProxyException e )
{
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for versioned Metadata " + Keys.toKey( metadata ) +
", continuing to next repository. Error message: " + e.getMessage() );
getLogger().debug( "Full stack trace", e );
log.debug( "Full stack trace", e );
}
}
@ -253,24 +252,24 @@ public class DefaultRepositoryProxyConnectors
}
catch ( LayoutException e )
{
getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
// TODO: add into repository report?
}
catch ( RepositoryMetadataException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
catch ( IOException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
catch ( ContentNotFoundException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
@ -339,21 +338,21 @@ public class DefaultRepositoryProxyConnectors
}
catch ( NotFoundException e )
{
getLogger().debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \""
log.debug( "Project Metadata " + Keys.toKey( metadata ) + " not found on remote repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( NotModifiedException e )
{
getLogger().debug( "Project Metadata " + Keys.toKey( metadata )
log.debug( "Project Metadata " + Keys.toKey( metadata )
+ " not updated on remote repository \""
+ targetRepository.getRepository().getId() + "\"." );
}
catch ( ProxyException e )
{
getLogger().warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
log.warn( "Transfer error from repository \"" + targetRepository.getRepository().getId() +
"\" for project metadata " + Keys.toKey( metadata ) +
", continuing to next repository. Error message: " + e.getMessage() );
getLogger().debug( "Full stack trace", e );
log.debug( "Full stack trace", e );
}
}
@ -371,24 +370,24 @@ public class DefaultRepositoryProxyConnectors
}
catch ( LayoutException e )
{
getLogger().warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
log.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage() );
// TODO: add into repository report?
}
catch ( RepositoryMetadataException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
catch ( IOException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
catch ( ContentNotFoundException e )
{
getLogger()
log
.warn( "Unable to update metadata " + localFile.getAbsolutePath() + ": " + e.getMessage(), e );
// TODO: add into repository report?
}
@ -495,7 +494,7 @@ public class DefaultRepositoryProxyConnectors
// Path must belong to whitelist.
if ( !matchesPattern( remotePath, connector.getWhitelist() ) )
{
getLogger().debug( "Path [" + remotePath +
log.debug( "Path [" + remotePath +
"] is not part of defined whitelist (skipping transfer from repository [" +
remoteRepository.getRepository().getName() + "])." );
return null;
@ -505,7 +504,7 @@ public class DefaultRepositoryProxyConnectors
// Is target path part of blacklist?
if ( matchesPattern( remotePath, connector.getBlacklist() ) )
{
getLogger().debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" +
log.debug( "Path [" + remotePath + "] is part of blacklist (skipping transfer from repository [" +
remoteRepository.getRepository().getName() + "])." );
return null;
}
@ -520,11 +519,11 @@ public class DefaultRepositoryProxyConnectors
String emsg = "Transfer not attempted on " + url + " : " + e.getMessage();
if ( fileExists( localFile ) )
{
getLogger().info( emsg + ": using already present local file." );
log.info( emsg + ": using already present local file." );
return localFile;
}
getLogger().info( emsg );
log.info( emsg );
return null;
}
@ -573,7 +572,7 @@ public class DefaultRepositoryProxyConnectors
}
catch ( ConnectionException e )
{
getLogger().warn( "Unable to disconnect wagon.", e );
log.warn( "Unable to disconnect wagon.", e );
}
}
}
@ -585,7 +584,7 @@ public class DefaultRepositoryProxyConnectors
}
catch ( PolicyViolationException e )
{
getLogger().info( "Transfer invalidated from " + url + " : " + e.getMessage() );
log.info( "Transfer invalidated from " + url + " : " + e.getMessage() );
if ( fileExists( localFile ) )
{
return localFile;
@ -629,22 +628,22 @@ public class DefaultRepositoryProxyConnectors
{
File hashFile = new File( localFile.getAbsolutePath() + type );
transferSimpleFile( wagon, remoteRepository, remotePath + type, hashFile );
getLogger().debug( "Checksum" + type + " Downloaded: " + hashFile );
log.debug( "Checksum" + type + " Downloaded: " + hashFile );
}
catch ( NotFoundException e )
{
getLogger().debug( "Transfer failed, checksum not found: " + url );
log.debug( "Transfer failed, checksum not found: " + url );
// Consume it, do not pass this on.
}
catch ( NotModifiedException e )
{
getLogger().debug( "Transfer skipped, checksum not modified: " + url );
log.debug( "Transfer skipped, checksum not modified: " + url );
// Consume it, do not pass this on.
}
catch ( ProxyException e )
{
urlFailureCache.cacheFailure( url + type );
getLogger().warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e );
log.warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e );
// Critical issue, pass it on.
throw e;
}
@ -678,7 +677,7 @@ public class DefaultRepositoryProxyConnectors
if ( !localFile.exists() )
{
getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() );
log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName() );
wagon.get( remotePath, temp );
success = true;
@ -688,11 +687,11 @@ public class DefaultRepositoryProxyConnectors
}
// You wouldn't get here on failure, a WagonException would have been thrown.
getLogger().debug( "Downloaded successfully." );
log.debug( "Downloaded successfully." );
}
else
{
getLogger().debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName()
log.debug( "Retrieving " + remotePath + " from " + remoteRepository.getRepository().getName()
+ " if updated" );
success = wagon.getIfNewer( remotePath, temp, localFile.lastModified() );
if ( !success )
@ -703,7 +702,7 @@ public class DefaultRepositoryProxyConnectors
if ( temp.exists() )
{
getLogger().debug( "Downloaded successfully." );
log.debug( "Downloaded successfully." );
moveTempToTarget( temp, localFile );
}
}
@ -748,14 +747,14 @@ public class DefaultRepositoryProxyConnectors
String defaultSetting = policy.getDefaultOption();
String setting = StringUtils.defaultString( (String) settings.get( key ), defaultSetting );
getLogger().debug( "Applying [" + key + "] policy with [" + setting + "]" );
log.debug( "Applying [" + key + "] policy with [" + setting + "]" );
try
{
policy.applyPolicy( setting, request, localFile );
}
catch ( PolicyConfigurationException e )
{
getLogger().error( e.getMessage(), e );
log.error( e.getMessage(), e );
}
}
}
@ -778,7 +777,7 @@ public class DefaultRepositoryProxyConnectors
if ( !temp.renameTo( target ) )
{
getLogger().warn( "Unable to rename tmp file to its final name... resorting to copy command." );
log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
try
{
@ -821,7 +820,7 @@ public class DefaultRepositoryProxyConnectors
if ( StringUtils.isNotBlank( username ) && StringUtils.isNotBlank( password ) )
{
getLogger().debug( "Using username " + username + " to connect to remote repository "
log.debug( "Using username " + username + " to connect to remote repository "
+ remoteRepository.getURL() );
authInfo = new AuthenticationInfo();
authInfo.setUserName( username );
@ -829,7 +828,7 @@ public class DefaultRepositoryProxyConnectors
}
else
{
getLogger().debug( "No authentication for remote repository needed" );
log.debug( "No authentication for remote repository needed" );
}
//Convert seconds to milliseconds
@ -851,14 +850,14 @@ public class DefaultRepositoryProxyConnectors
}
catch ( ConnectionException e )
{
getLogger().warn(
log.warn(
"Could not connect to " + remoteRepository.getRepository().getName() + ": "
+ e.getMessage() );
connected = false;
}
catch ( AuthenticationException e )
{
getLogger().warn(
log.warn(
"Could not connect to " + remoteRepository.getRepository().getName() + ": "
+ e.getMessage() );
connected = false;
@ -925,6 +924,16 @@ public class DefaultRepositoryProxyConnectors
{
/* do nothing */
}
private void logProcess( String managedRepoId, String resource, String event )
{
}
private void logRejection( String managedRepoId, String remoteRepoId, String resource, String reason )
{
}
private void initConnectorsAndNetworkProxies()
{
@ -988,11 +997,11 @@ public class DefaultRepositoryProxyConnectors
}
catch ( RepositoryNotFoundException e )
{
getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e );
log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
}
catch ( RepositoryException e )
{
getLogger().warn( "Unable to use proxy connector: " + e.getMessage(), e );
log.warn( "Unable to use proxy connector: " + e.getMessage(), e );
}
}

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.proxy;
* under the License.
*/
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException;
@ -31,11 +35,8 @@ import org.apache.maven.wagon.events.SessionListener;
import org.apache.maven.wagon.events.TransferListener;
import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.repository.Repository;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A dummy wagon implementation
@ -43,9 +44,10 @@ import java.util.List;
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
*/
public class WagonDelegate
extends AbstractLogEnabled
implements Wagon
{
private Logger log = LoggerFactory.getLogger( WagonDelegate.class );
private Wagon delegate;
private String contentToGet;
@ -53,7 +55,7 @@ public class WagonDelegate
public void get( String resourceName, File destination )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{
getLogger().debug( ".get(" + resourceName + ", " + destination + ")" );
log.debug( ".get(" + resourceName + ", " + destination + ")" );
delegate.get( resourceName, destination );
create( destination );
}
@ -61,7 +63,7 @@ public class WagonDelegate
public boolean getIfNewer( String resourceName, File destination, long timestamp )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{
getLogger().info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" );
log.info( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" );
boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
createIfMissing( destination );

View File

@ -19,7 +19,8 @@ package org.apache.maven.archiva.repository.audit;
* under the License.
*/
import org.apache.log4j.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* AuditLog - Audit Log.
@ -33,7 +34,7 @@ import org.apache.log4j.Logger;
public class AuditLog
implements AuditListener
{
public static final Logger logger = Logger.getLogger( "org.apache.archiva.AuditLog" );
public static final Logger logger = LoggerFactory.getLogger( "org.apache.archiva.AuditLog" );
private static final char DELIM = ' ';

View File

@ -19,6 +19,8 @@ package org.apache.maven.archiva.repository.project;
* under the License.
*/
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@ -29,13 +31,12 @@ import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.project.resolvers.ManagedRepositoryProjectResolver;
import org.apache.maven.archiva.repository.project.resolvers.NopProjectResolver;
import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolverStack;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Factory for ProjectModelResolver objects
@ -45,9 +46,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.repository.project.ProjectModelResolverFactory"
*/
public class ProjectModelResolverFactory
extends AbstractLogEnabled
implements RegistryListener, Initializable
{
private Logger log = LoggerFactory.getLogger( ProjectModelResolverFactory.class );
/**
* @plexus.requirement
*/
@ -128,7 +130,7 @@ public class ProjectModelResolverFactory
}
catch ( RepositoryException e )
{
getLogger().warn( e.getMessage(), e );
log.warn( e.getMessage(), e );
}
}

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.repository.scanner;
* under the License.
*/
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
@ -26,12 +30,9 @@ import org.apache.maven.archiva.consumers.InvalidRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.apache.maven.archiva.repository.RepositoryException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.DirectoryWalker;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DefaultRepositoryScanner
@ -41,9 +42,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.repository.scanner.RepositoryScanner"
*/
public class DefaultRepositoryScanner
extends AbstractLogEnabled
implements RepositoryScanner
{
private Logger log = LoggerFactory.getLogger( DefaultRepositoryScanner.class );
/**
* @plexus.requirement
*/
@ -112,7 +114,7 @@ public class DefaultRepositoryScanner
// Setup the Scan Instance
RepositoryScannerInstance scannerInstance = new RepositoryScannerInstance( repository, knownContentConsumers,
invalidContentConsumers, getLogger(), changesSince );
invalidContentConsumers, changesSince );
dirWalker.addDirectoryWalkListener( scannerInstance );

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.repository.scanner;
* under the License.
*/
import java.io.File;
import java.util.List;
import org.apache.commons.collections.Closure;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.functors.IfClosure;
@ -30,12 +33,9 @@ import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerProcessFileClosure;
import org.apache.maven.archiva.repository.scanner.functors.ConsumerWantsFilePredicate;
import org.apache.maven.archiva.repository.scanner.functors.TriggerBeginScanClosure;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.DirectoryWalkListener;
import java.io.File;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* RepositoryScannerInstance
*
@ -45,6 +45,8 @@ import java.util.List;
public class RepositoryScannerInstance
implements DirectoryWalkListener
{
private Logger log = LoggerFactory.getLogger( RepositoryScannerInstance.class );
/**
* Consumers that process known content.
*/
@ -65,24 +67,21 @@ public class RepositoryScannerInstance
private ConsumerWantsFilePredicate consumerWantsFile;
private Logger logger;
public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
List<KnownRepositoryContentConsumer> knownConsumerList,
List<InvalidRepositoryContentConsumer> invalidConsumerList, Logger logger )
List<InvalidRepositoryContentConsumer> invalidConsumerList )
{
this.repository = repository;
this.knownConsumers = knownConsumerList;
this.invalidConsumers = invalidConsumerList;
this.logger = logger;
this.consumerProcessFile = new ConsumerProcessFileClosure( logger );
this.consumerProcessFile = new ConsumerProcessFileClosure();
this.consumerWantsFile = new ConsumerWantsFilePredicate();
stats = new RepositoryScanStatistics();
stats.setRepositoryId( repository.getId() );
Closure triggerBeginScan = new TriggerBeginScanClosure( repository, logger );
Closure triggerBeginScan = new TriggerBeginScanClosure( repository );
CollectionUtils.forAllDo( knownConsumerList, triggerBeginScan );
CollectionUtils.forAllDo( invalidConsumerList, triggerBeginScan );
@ -95,10 +94,9 @@ public class RepositoryScannerInstance
public RepositoryScannerInstance( ManagedRepositoryConfiguration repository,
List<KnownRepositoryContentConsumer> knownContentConsumers,
List<InvalidRepositoryContentConsumer> invalidContentConsumers, Logger logger,
long changesSince )
List<InvalidRepositoryContentConsumer> invalidContentConsumers, long changesSince )
{
this( repository, knownContentConsumers, invalidContentConsumers, logger );
this( repository, knownContentConsumers, invalidContentConsumers );
consumerWantsFile.setChangesSince( changesSince );
@ -112,13 +110,13 @@ public class RepositoryScannerInstance
public void directoryWalkStarting( File basedir )
{
logger.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() );
log.info( "Walk Started: [" + this.repository.getId() + "] " + this.repository.getLocation() );
stats.triggerStart();
}
public void directoryWalkStep( int percentage, File file )
{
logger.debug( "Walk Step: " + percentage + ", " + file );
log.debug( "Walk Step: " + percentage + ", " + file );
stats.increaseFileCount();
@ -146,7 +144,7 @@ public class RepositoryScannerInstance
public void directoryWalkFinished()
{
logger.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() );
log.info( "Walk Finished: [" + this.repository.getId() + "] " + this.repository.getLocation() );
stats.triggerFinished();
}
@ -155,6 +153,6 @@ public class RepositoryScannerInstance
*/
public void debug( String message )
{
logger.debug( "Repository Scanner: " + message );
log.debug( "Repository Scanner: " + message );
}
}

View File

@ -23,7 +23,8 @@ import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.RepositoryContentConsumer;
import org.codehaus.plexus.logging.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TriggerBeginScanClosure
@ -34,14 +35,13 @@ import org.codehaus.plexus.logging.Logger;
public class TriggerBeginScanClosure
implements Closure
{
private Logger log = LoggerFactory.getLogger( TriggerBeginScanClosure.class );
private ManagedRepositoryConfiguration repository;
private Logger logger;
public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository, Logger logger )
public TriggerBeginScanClosure( ManagedRepositoryConfiguration repository )
{
this.repository = repository;
this.logger = logger;
}
public void execute( Object input )
@ -56,7 +56,7 @@ public class TriggerBeginScanClosure
}
catch ( ConsumerException e )
{
logger.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e );
log.warn( "Consumer [" + consumer.getId() + "] cannot begin: " + e.getMessage(), e );
}
}
}

View File

@ -21,7 +21,6 @@ package org.apache.maven.archiva.transaction;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.IOUtil;
@ -43,7 +42,6 @@ import java.util.Map;
* @version $Id$
*/
public abstract class AbstractTransactionEvent
extends AbstractLogEnabled
implements TransactionEvent
{
private Map backups = new HashMap();

View File

@ -19,6 +19,12 @@ package org.apache.maven.archiva.database.browsing;
* under the License.
*/
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.PredicateUtils;
import org.apache.commons.collections.functors.NotPredicate;
@ -35,13 +41,8 @@ import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaProjectModel;
import org.apache.maven.archiva.model.Keys;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DefaultRepositoryBrowsing
@ -51,9 +52,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.database.browsing.RepositoryBrowsing"
*/
public class DefaultRepositoryBrowsing
extends AbstractLogEnabled
implements RepositoryBrowsing
{
private Logger log = LoggerFactory.getLogger( DefaultRepositoryBrowsing.class );
/**
* @plexus.requirement role-hint="jdo"
*/
@ -201,7 +203,7 @@ public class DefaultRepositoryBrowsing
{
Map<String, String> snapshots = new HashMap<String, String>();
getLogger().info( "Processing snapshots." );
log.info( "Processing snapshots." );
for ( String version : versions )
{
@ -253,7 +255,7 @@ public class DefaultRepositoryBrowsing
{
if ( VersionUtil.getBaseVersion( uniqueVersion ).equals( version ) )
{
getLogger().info( "Retrieving artifact with version " + uniqueVersion );
log.info( "Retrieving artifact with version " + uniqueVersion );
pomArtifact = dao.getArtifactDAO().getArtifact( groupId, artifactId, uniqueVersion, null, "pom" );
return pomArtifact;

View File

@ -28,7 +28,6 @@ import org.apache.maven.archiva.database.SimpleConstraint;
import org.apache.maven.archiva.database.constraints.AbstractSimpleConstraint;
import org.apache.maven.archiva.model.CompoundKey;
import org.codehaus.plexus.jdo.JdoFactory;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
@ -60,7 +59,6 @@ import javax.jdo.spi.PersistenceCapable;
* @plexus.component role="org.apache.maven.archiva.database.jdo.JdoAccess" role-hint="archiva"
*/
public class JdoAccess
extends AbstractLogEnabled
implements Initializable, InstanceLifecycleListener, StoreLifecycleListener
{
/**

View File

@ -25,7 +25,6 @@ import org.apache.maven.archiva.database.ProjectModelDAO;
import org.apache.maven.archiva.database.RepositoryContentStatisticsDAO;
import org.apache.maven.archiva.database.RepositoryProblemDAO;
import org.apache.maven.archiva.database.SimpleConstraint;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.Serializable;
import java.util.List;
@ -39,7 +38,6 @@ import java.util.List;
* @plexus.component role-hint="jdo"
*/
public class JdoArchivaDAO
extends AbstractLogEnabled
implements ArchivaDAO
{
/**

View File

@ -26,7 +26,6 @@ import org.apache.maven.archiva.database.ObjectNotFoundException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaArtifactModel;
import org.apache.maven.archiva.model.jpox.ArchivaArtifactModelKey;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.ArrayList;
import java.util.Iterator;
@ -41,7 +40,6 @@ import java.util.List;
* @plexus.component role-hint="jdo"
*/
public class JdoArtifactDAO
extends AbstractLogEnabled
implements ArtifactDAO
{
/**

View File

@ -19,6 +19,8 @@ package org.apache.maven.archiva.database.project;
* under the License.
*/
import java.util.List;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.ObjectNotFoundException;
@ -28,9 +30,8 @@ import org.apache.maven.archiva.repository.project.ProjectModelException;
import org.apache.maven.archiva.repository.project.ProjectModelResolver;
import org.apache.maven.archiva.repository.project.resolvers.FilesystemBasedResolver;
import org.apache.maven.archiva.repository.project.resolvers.ProjectModelResolutionListener;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Just in Time save of project models to the database, implemented as a listener
@ -44,9 +45,10 @@ import java.util.List;
* role-hint="model-to-db"
*/
public class ProjectModelToDatabaseListener
extends AbstractLogEnabled
implements ProjectModelResolutionListener
{
private Logger log = LoggerFactory.getLogger( ProjectModelToDatabaseListener.class );
/**
* @plexus.requirement role-hint="jdo"
*/
@ -147,7 +149,7 @@ public class ProjectModelToDatabaseListener
}
catch ( ProjectModelException e )
{
getLogger().warn( e.getMessage(), e );
log.warn( e.getMessage(), e );
}
}
}

View File

@ -19,6 +19,10 @@ package org.apache.maven.archiva.database.updater;
* under the License.
*/
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.IteratorUtils;
import org.apache.commons.collections.Predicate;
@ -29,11 +33,8 @@ import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.ArtifactsProcessedConstraint;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.functors.UnprocessedArtifactPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* JdoDatabaseUpdater
@ -45,9 +46,10 @@ import java.util.List;
* role-hint="jdo"
*/
public class JdoDatabaseUpdater
extends AbstractLogEnabled
implements DatabaseUpdater
{
private Logger log = LoggerFactory.getLogger( JdoDatabaseUpdater.class );
/**
* @plexus.requirement role-hint="jdo"
*/
@ -144,7 +146,7 @@ public class JdoDatabaseUpdater
if ( CollectionUtils.isEmpty( consumers ) )
{
getLogger().warn( "There are no selected consumers for unprocessed artifacts." );
log.warn( "There are no selected consumers for unprocessed artifacts." );
return;
}
@ -162,7 +164,7 @@ public class JdoDatabaseUpdater
if ( CollectionUtils.isEmpty( consumers ) )
{
getLogger().warn( "There are no selected consumers for artifact cleanup." );
log.warn( "There are no selected consumers for artifact cleanup." );
return;
}

View File

@ -23,7 +23,8 @@ import org.apache.commons.collections.Closure;
import org.apache.maven.archiva.consumers.ArchivaArtifactConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ProcessArchivaArtifactClosure
@ -36,9 +37,10 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* instantiation-strategy="per-lookup"
*/
class ProcessArchivaArtifactClosure
extends AbstractLogEnabled
implements Closure
{
private Logger log = LoggerFactory.getLogger( ProcessArchivaArtifactClosure.class );
private ArchivaArtifact artifact;
public void execute( Object input )
@ -53,9 +55,7 @@ class ProcessArchivaArtifactClosure
}
catch ( ConsumerException e )
{
getLogger().warn(
"Unable to process artifact [" + artifact + "] with consumer [" + consumer.getId()
+ "]" );
log.warn( "Unable to process artifact [" + artifact + "] with consumer [" + consumer.getId() + "]" );
}
}

View File

@ -19,8 +19,6 @@ package org.apache.maven.archiva.reporting;
* under the License.
*/
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.Map;
/**
@ -33,7 +31,6 @@ import java.util.Map;
* role-hint="default"
*/
public class DefaultReportingManager
extends AbstractLogEnabled
implements ReportingManager
{
/**

View File

@ -19,6 +19,9 @@ package org.apache.maven.archiva.scheduled;
* under the License.
*/
import java.text.ParseException;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
@ -27,7 +30,6 @@ import org.apache.maven.archiva.scheduled.tasks.ArchivaTask;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTaskSelectionPredicate;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Startable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StartingException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.StoppingException;
@ -43,9 +45,8 @@ import org.quartz.CronTrigger;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.SchedulerException;
import java.text.ParseException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default implementation of a scheduling component for archiva.
@ -55,9 +56,10 @@ import java.util.List;
* @plexus.component role="org.apache.maven.archiva.scheduled.ArchivaTaskScheduler" role-hint="default"
*/
public class DefaultArchivaTaskScheduler
extends AbstractLogEnabled
implements ArchivaTaskScheduler, Startable, RegistryListener
{
private Logger log = LoggerFactory.getLogger( DefaultArchivaTaskScheduler.class );
/**
* @plexus.requirement
*/
@ -134,7 +136,7 @@ public class DefaultArchivaTaskScheduler
{
if ( repoConfig.getRefreshCronExpression() == null )
{
getLogger().warn( "Skipping job, no cron expression for " + repoConfig.getId() );
log.warn( "Skipping job, no cron expression for " + repoConfig.getId() );
return;
}
@ -144,7 +146,7 @@ public class DefaultArchivaTaskScheduler
CronExpressionValidator cronValidator = new CronExpressionValidator();
if ( !cronValidator.validate( cronString ) )
{
getLogger().warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
log.warn( "Cron expression [" + cronString + "] for repository [" + repoConfig.getId() +
"] is invalid. Defaulting to hourly." );
cronString = CRON_HOURLY;
}
@ -168,7 +170,7 @@ public class DefaultArchivaTaskScheduler
}
catch ( ParseException e )
{
getLogger().error(
log.error(
"ParseException in repository scanning cron expression, disabling repository scanning for '" +
repoConfig.getId() + "': " + e.getMessage() );
}
@ -190,7 +192,7 @@ public class DefaultArchivaTaskScheduler
CronExpressionValidator cronValidator = new CronExpressionValidator();
if ( !cronValidator.validate( cronString ) )
{
getLogger().warn(
log.warn(
"Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." );
cronString = CRON_HOURLY;
}
@ -203,7 +205,7 @@ public class DefaultArchivaTaskScheduler
}
catch ( ParseException e )
{
getLogger().error(
log.error(
"ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() );
}
@ -235,7 +237,7 @@ public class DefaultArchivaTaskScheduler
// cronExpression comes from the database scanning section
if ( "cronExpression".equals( propertyName ) )
{
getLogger().debug( "Restarting the database scheduled task after property change: " + propertyName );
log.debug( "Restarting the database scheduled task after property change: " + propertyName );
try
{
@ -245,7 +247,7 @@ public class DefaultArchivaTaskScheduler
}
catch ( SchedulerException e )
{
getLogger().error( "Error restarting the database scanning job after property change." );
log.error( "Error restarting the database scanning job after property change." );
}
}
@ -269,7 +271,7 @@ public class DefaultArchivaTaskScheduler
}
catch ( SchedulerException e )
{
getLogger().error( "error restarting job: " + REPOSITORY_JOB + ":" + repoConfig.getId() );
log.error( "error restarting job: " + REPOSITORY_JOB + ":" + repoConfig.getId() );
}
}
}

View File

@ -22,12 +22,13 @@ package org.apache.maven.archiva.scheduled.executors;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.updater.DatabaseUpdater;
import org.apache.maven.archiva.scheduled.tasks.DatabaseTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ArchivaDatabaseTaskExecutor
@ -40,9 +41,10 @@ import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
* role-hint="database-update"
*/
public class ArchivaDatabaseUpdateTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor, Initializable
{
private Logger log = LoggerFactory.getLogger( ArchivaDatabaseUpdateTaskExecutor.class );
/**
* @plexus.requirement role-hint="jdo"
*/
@ -51,7 +53,7 @@ public class ArchivaDatabaseUpdateTaskExecutor
public void initialize()
throws InitializationException
{
getLogger().info( "Initialized " + this.getClass().getName() );
log.info( "Initialized " + this.getClass().getName() );
}
public void executeTask( Task task )
@ -59,12 +61,12 @@ public class ArchivaDatabaseUpdateTaskExecutor
{
DatabaseTask dbtask = (DatabaseTask) task;
getLogger().info( "Executing task from queue with job name: " + dbtask.getName() );
log.info( "Executing task from queue with job name: " + dbtask.getName() );
long time = System.currentTimeMillis();
try
{
getLogger().info( "Task: Updating unprocessed artifacts" );
log.info( "Task: Updating unprocessed artifacts" );
databaseUpdater.updateAllUnprocessed();
}
catch ( ArchivaDatabaseException e )
@ -74,7 +76,7 @@ public class ArchivaDatabaseUpdateTaskExecutor
try
{
getLogger().info( "Task: Updating processed artifacts" );
log.info( "Task: Updating processed artifacts" );
databaseUpdater.updateAllProcessed();
}
catch ( ArchivaDatabaseException e )
@ -84,6 +86,6 @@ public class ArchivaDatabaseUpdateTaskExecutor
time = System.currentTimeMillis() - time;
getLogger().info( "Finished database task in " + time + "ms." );
log.info( "Finished database task in " + time + "ms." );
}
}

View File

@ -19,26 +19,26 @@ package org.apache.maven.archiva.scheduled.executors;
* under the License.
*/
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.database.constraints.MostRecentRepositoryScanStatistics;
import org.apache.maven.archiva.model.RepositoryContentStatistics;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.scanner.RepositoryScanStatistics;
import org.apache.maven.archiva.repository.scanner.RepositoryScanner;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.taskqueue.Task;
import org.codehaus.plexus.taskqueue.execution.TaskExecutionException;
import org.codehaus.plexus.taskqueue.execution.TaskExecutor;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ArchivaRepositoryScanningTaskExecutor
@ -51,9 +51,10 @@ import java.util.List;
* role-hint="repository-scanning"
*/
public class ArchivaRepositoryScanningTaskExecutor
extends AbstractLogEnabled
implements TaskExecutor, Initializable
{
private Logger log = LoggerFactory.getLogger( ArchivaRepositoryScanningTaskExecutor.class );
/**
* @plexus.requirement role-hint="jdo"
*/
@ -74,7 +75,7 @@ public class ArchivaRepositoryScanningTaskExecutor
public void initialize()
throws InitializationException
{
getLogger().info( "Initialized " + this.getClass().getName() );
log.info( "Initialized " + this.getClass().getName() );
}
public void executeTask( Task task )
@ -87,7 +88,7 @@ public class ArchivaRepositoryScanningTaskExecutor
throw new TaskExecutionException("Unable to execute RepositoryTask with blank repository Id.");
}
getLogger().info( "Executing task from queue with job name: " + repoTask.getName() );
log.info( "Executing task from queue with job name: " + repoTask.getName() );
try
{
@ -105,7 +106,7 @@ public class ArchivaRepositoryScanningTaskExecutor
RepositoryScanStatistics stats = repoScanner.scan( arepo, sinceWhen );
getLogger().info( "Finished repository task: " + stats.toDump( arepo ) );
log.info( "Finished repository task: " + stats.toDump( arepo ) );
// I hate jpox and modello
RepositoryContentStatistics dbstats = new RepositoryContentStatistics();

View File

@ -19,11 +19,12 @@ package org.apache.maven.archiva.security;
* under the License.
*/
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.util.List;
import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ArchivaStandardRolesCheck tests for the existance of expected / standard roles and permissions.
@ -35,9 +36,10 @@ import java.util.List;
* role-hint="required-roles"
*/
public class ArchivaStandardRolesCheck
extends AbstractLogEnabled
implements EnvironmentCheck
{
private Logger log = LoggerFactory.getLogger( ArchivaStandardRolesCheck.class );
/**
* @plexus.requirement role-hint="cached"
*/
@ -61,7 +63,7 @@ public class ArchivaStandardRolesCheck
ArchivaRoleConstants.REGISTERED_USER_ROLE,
ArchivaRoleConstants.USER_ADMINISTRATOR_ROLE };
getLogger().info( "Checking the existance of required roles." );
log.info( "Checking the existance of required roles." );
for ( String roleName : expectedRoles )
{
@ -85,7 +87,7 @@ public class ArchivaStandardRolesCheck
ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS,
"archiva-guest" };
getLogger().info( "Checking the existance of required operations." );
log.info( "Checking the existance of required operations." );
for ( String operation : expectedOperations )
{

View File

@ -19,23 +19,24 @@ package org.apache.maven.archiva.security;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.rbac.RbacManagerException;
import org.codehaus.plexus.redback.rbac.UserAssignment;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* SecurityStartup
@ -46,9 +47,10 @@ import java.util.Map.Entry;
* @plexus.component role="org.apache.maven.archiva.security.SecurityStartup"
*/
public class SecurityStartup
extends AbstractLogEnabled
implements RegistryListener
{
private Logger log = LoggerFactory.getLogger( SecurityStartup.class );
/**
* @plexus.requirement
*/
@ -104,7 +106,7 @@ public class SecurityStartup
}
catch ( RbacManagerException e )
{
getLogger().warn(
log.warn(
"Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId )
+ "] to " + principal + " user.", e );
}
@ -129,7 +131,7 @@ public class SecurityStartup
}
catch ( ArchivaSecurityException e )
{
getLogger().warn( e.getMessage(), e );
log.warn( e.getMessage(), e );
}
}
}
@ -145,7 +147,7 @@ public class SecurityStartup
}
catch ( ArchivaSecurityException e )
{
getLogger().warn( e.getMessage(), e );
log.warn( e.getMessage(), e );
}
}
}
@ -164,7 +166,7 @@ public class SecurityStartup
for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() )
{
EnvironmentCheck check = entry.getValue();
getLogger().info( "Running Environment Check: " + entry.getKey() );
log.info( "Running Environment Check: " + entry.getKey() );
check.validateEnvironment( violations );
}
@ -183,7 +185,7 @@ public class SecurityStartup
msg.append( "\n" );
msg.append( "======================================================================" );
getLogger().fatalError( msg.toString() );
log.error( msg.toString() );
throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size()
+ "] violation(s) encountered, See log for details." );

View File

@ -23,7 +23,6 @@ import com.opensymphony.webwork.ServletActionContext;
import com.opensymphony.xwork.ActionInvocation;
import com.opensymphony.xwork.interceptor.Interceptor;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import javax.servlet.ServletContext;
@ -35,7 +34,6 @@ import javax.servlet.ServletContext;
* role-hint="configurationInterceptor"
*/
public class ConfigurationInterceptor
extends AbstractLogEnabled
implements Interceptor
{
/**

View File

@ -21,9 +21,10 @@ package org.apache.maven.archiva.web.startup;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ArchivaStartup - the startup of all archiva features in a deterministic order.
@ -36,7 +37,6 @@ import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationExce
* role-hint="default"
*/
public class ArchivaStartup
extends AbstractLogEnabled
implements Initializable
{
/**
@ -57,7 +57,7 @@ public class ArchivaStartup
public void initialize()
throws InitializationException
{
Banner.display( getLogger(), ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
Banner.display( ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
try
{

View File

@ -19,14 +19,13 @@ package org.apache.maven.archiva.web.startup;
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.slf4j.LoggerFactory;
/**
* Banner
*
@ -38,7 +37,6 @@ import java.util.regex.Pattern;
* role-hint="default"
*/
public class Banner
extends AbstractLogEnabled
{
public static String encode( String raw )
{
@ -224,15 +222,15 @@ public class Banner
return injectVersion( decode( encodedBanner ), version );
}
public static void display( Logger logger, String version )
public static void display( String version )
{
String banner = getBanner( version );
logger.info( StringUtils.repeat( "_", 25 ) + "\n" + banner );
LoggerFactory.getLogger( Banner.class ).info( StringUtils.repeat( "_", 25 ) + "\n" + banner );
}
public void initialize()
throws InitializationException
{
Banner.display( getLogger(), ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
Banner.display( ArchivaVersion.determineVersion( this.getClass().getClassLoader() ) );
}
}

View File

@ -23,7 +23,6 @@ import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.database.project.ProjectModelToDatabaseListener;
import org.apache.maven.archiva.repository.project.ProjectModelResolver;
import org.apache.maven.archiva.repository.project.ProjectModelResolverFactory;
import org.codehaus.plexus.logging.AbstractLogEnabled;
/**
* ResolverFactoryInit - Initialize the Resolver Factory, and hook it up to
@ -37,7 +36,6 @@ import org.codehaus.plexus.logging.AbstractLogEnabled;
* role-hint="default"
*/
public class ResolverFactoryInit
extends AbstractLogEnabled
{
/**
* @plexus.requirement role-hint="database"

View File

@ -19,13 +19,17 @@ package org.apache.maven.archiva.web.startup;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.security.ArchivaRoleConstants;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.redback.rbac.RBACManager;
import org.codehaus.plexus.redback.rbac.RbacManagerException;
import org.codehaus.plexus.redback.rbac.UserAssignment;
@ -34,11 +38,8 @@ import org.codehaus.plexus.redback.role.RoleManagerException;
import org.codehaus.plexus.redback.system.check.EnvironmentCheck;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * SecuritySynchronization
@ -50,9 +51,10 @@ import java.util.Map.Entry;
* role-hint="default"
*/
public class SecuritySynchronization
extends AbstractLogEnabled
implements RegistryListener
{
private Logger log = LoggerFactory.getLogger( SecuritySynchronization.class );
/**
* @plexus.requirement role-hint="default"
*/
@ -112,7 +114,7 @@ public class SecuritySynchronization
catch ( RoleManagerException e )
{
// Log error.
getLogger().error( "Unable to create roles for configured repositories: " + e.getMessage(), e );
log.error( "Unable to create roles for configured repositories: " + e.getMessage(), e );
}
}
}
@ -145,7 +147,7 @@ public class SecuritySynchronization
for ( Entry<String, EnvironmentCheck> entry : checkers.entrySet() )
{
EnvironmentCheck check = entry.getValue();
getLogger().info( "Running Environment Check: " + entry.getKey() );
log.info( "Running Environment Check: " + entry.getKey() );
check.validateEnvironment( violations );
}
@ -164,7 +166,7 @@ public class SecuritySynchronization
msg.append( "\n" );
msg.append( "======================================================================" );
getLogger().fatalError( msg.toString() );
log.error( msg.toString() );
throw new ArchivaException( "Unable to initialize Redback Security Environment, [" + violations.size()
+ "] violation(s) encountered, See log for details." );
@ -198,7 +200,7 @@ public class SecuritySynchronization
}
catch ( RbacManagerException e )
{
getLogger().warn( "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId )
log.warn( "Unable to add role [" + ArchivaRoleConstants.toRepositoryObserverRoleName( repoId )
+ "] to " + principal + " user.", e );
}
}

View File

@ -19,15 +19,17 @@ package org.apache.maven.archiva.web.tags;
* under the License.
*/
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.IOException;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;
import java.io.IOException;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.web.util.ContextUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* CopyPasteSnippet
@ -37,8 +39,9 @@ import java.io.IOException;
* @plexus.component role="org.apache.maven.archiva.web.tags.CopyPasteSnippet"
*/
public class CopyPasteSnippet
extends AbstractLogEnabled
{
private Logger log = LoggerFactory.getLogger( CopyPasteSnippet.class );
public static final String PRE = "pre";
public static final String TOGGLE = "toggle";
@ -53,7 +56,7 @@ public class CopyPasteSnippet
if ( o == null )
{
buf.append( "Error generating snippet." );
getLogger().error( "Unable to generate snippet for null object." );
log.error( "Unable to generate snippet for null object." );
}
else if ( o instanceof ManagedRepositoryConfiguration )
{

View File

@ -19,6 +19,12 @@ package org.apache.maven.archiva.web.tags;
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import javax.servlet.jsp.PageContext;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.ArchivaException;
import org.apache.maven.archiva.dependency.DependencyGraphFactory;
@ -34,15 +40,10 @@ import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.DependencyScope;
import org.apache.maven.archiva.model.Keys;
import org.apache.maven.archiva.model.VersionedReference;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import javax.servlet.jsp.PageContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* DependencyTree
@ -53,9 +54,10 @@ import javax.servlet.jsp.PageContext;
* @plexus.component role="org.apache.maven.archiva.web.tags.DependencyTree"
*/
public class DependencyTree
extends AbstractLogEnabled
implements Initializable
{
private Logger log = LoggerFactory.getLogger( DependencyTree.class );
/**
* @plexus.requirement
* role="org.apache.maven.archiva.dependency.graph.DependencyGraphBuilder"
@ -121,7 +123,7 @@ public class DependencyTree
{
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: groupId is blank.";
getLogger().error( emsg );
log.error( emsg );
throw new ArchivaException( emsg );
}
@ -129,7 +131,7 @@ public class DependencyTree
{
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: artifactId is blank.";
getLogger().error( emsg );
log.error( emsg );
throw new ArchivaException( emsg );
}
@ -137,7 +139,7 @@ public class DependencyTree
{
String emsg = "Error generating dependency tree [" + Keys.toKey( groupId, artifactId, modelVersion )
+ "]: version is blank.";
getLogger().error( emsg );
log.error( emsg );
throw new ArchivaException( emsg );
}
@ -249,7 +251,7 @@ public class DependencyTree
catch ( GraphTaskException e )
{
String emsg = "Unable to generate graph for [" + Keys.toKey( projectRef ) + "] : " + e.getMessage();
getLogger().warn( emsg, e );
log.warn( emsg, e );
throw new ArchivaException( emsg, e );
}
}