Utilize parameterized logging and remove toString() calls

As suggested in the SLF4J FAQ
(https://www.slf4j.org/faq.html#logging_performance),
parameterized logging can improve the efficiency of logger calls when
logging at the specified level is disabled.
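
For example (a minimal sketch; the class, method, and argument names are
illustrative and not taken from this commit), the parameterized call below is
only formatted once SLF4J has checked that the level is enabled, while the
concatenated form always builds the full string first:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch
    {
        private static final Logger log = LoggerFactory.getLogger( ParameterizedLoggingSketch.class );

        void report( String filetype )
        {
            // Always concatenates, even when DEBUG is disabled for this logger:
            // log.debug( "Unable to create " + filetype + " file" );

            // Defers formatting until the level check has passed:
            log.debug( "Unable to create {} file", filetype );
        }
    }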

In addition, per the FAQ (https://www.slf4j.org/faq.html#paramException),
since SLF4J 1.6.0 it is possible to use parameterized logging and also
log an exception/throwable.
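
A minimal sketch of that pattern (the surrounding class and I/O call are
illustrative): when the final argument is a Throwable with no matching {}
placeholder, SLF4J logs both the formatted message and the stack trace:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedExceptionSketch
    {
        private static final Logger log = LoggerFactory.getLogger( ParameterizedExceptionSketch.class );

        void read( String path )
        {
            try
            {
                Files.readAllBytes( Paths.get( path ) );
            }
            catch ( IOException e )
            {
                // The trailing 'e' is treated as the throwable, not as a message argument,
                // so the stack trace is logged alongside the formatted message:
                log.error( "Unable to read {}: {}", path, e.getMessage(), e );
            }
        }
    }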

toString() is called automatically on message arguments
(https://www.slf4j.org/faq.html#string_contents), so explicit toString()
calls in logging statements are unnecessary.
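
For instance (a sketch mirroring the DefaultPluginsServices change below; the
class name is illustrative), passing the object itself is enough, and its
toString() is invoked only if the level is enabled:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ToStringSketch
    {
        private static final Logger log = LoggerFactory.getLogger( ToStringSketch.class );

        void dump( StringBuilder sb )
        {
            // No explicit sb.toString(); SLF4J formats the argument itself:
            log.debug( "getAdminPlugins: {}", sb );
        }
    }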

These changes were suggested by SLF4J Helper for NetBeans IDE:
http://plugins.netbeans.org/plugin/72557/
Author: Daniel Trebbien
Date:   2017-07-30 13:26:05 -07:00
Parent: f637c02c71
Commit: 0b15664f1c

23 changed files with 53 additions and 54 deletions


@@ -703,7 +703,7 @@ public class DefaultArchivaConfiguration
         }
         catch ( IOException e )
         {
-            log.error( "Unable to create " + filetype + " file: " + e.getMessage(), e );
+            log.error( "Unable to create {} file: {}", filetype, e.getMessage(), e );
             return false;
         }
     }


@@ -144,7 +144,7 @@ public abstract class AbstractRepositoryPurgeTest
         if ( path.contains( " " ) )
         {
             LoggerFactory.getLogger( AbstractRepositoryPurgeTest.class.getName() ).error(
-                "You are building and testing with a path: \n " + path + " containing space. Consider relocating." );
+                "You are building and testing with a path: \n {} containing space. Consider relocating.", path );
             return path.replaceAll( " ", "&20" );
         }
         return path;


@@ -219,7 +219,7 @@ public class NexusIndexerConsumer
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Error queueing task: " + task + ": " + e.getMessage(), e );
+            log.error( "Error queueing task: {}: {}", task, e.getMessage(), e );
         }
     }


@@ -112,7 +112,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread1 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -132,7 +132,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread2 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -156,7 +156,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread3 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -177,7 +177,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread4 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -198,7 +198,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread5 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -221,7 +221,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread6 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -242,7 +242,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread7 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -265,7 +265,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread8 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -286,7 +286,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread9 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }
@@ -308,7 +308,7 @@ public class DefaultFileLockManagerTest {
                 logger.info("thread10 ok");
                 success.incrementAndGet();
             } catch (Throwable e) {
-                logger.error("Error occured " + e.getMessage());
+                logger.error("Error occured {}", e.getMessage());
                 e.printStackTrace();
                 throw e;
             }


@@ -76,14 +76,14 @@ public class DefaultFileLockManagerTimeoutTest
             try {
                 Files.copy(largeJar.toPath(), lock.getFile().toPath(), StandardCopyOption.REPLACE_EXISTING);
             } catch (IOException e) {
-                logger.warn("Copy failed "+e.getMessage());
+                logger.warn("Copy failed {}", e.getMessage());
                 // On windows a FileSystemException is thrown
                 // We ignore this
             }
             lock = fileLockManager.writeFileLock(file);
         } catch (FileSystemException ex) {
-            logger.error("Exception from filesystem "+ex.getMessage());
+            logger.error("Exception from filesystem {}", ex.getMessage());
             ex.printStackTrace();
             throw ex;
         }


@@ -428,10 +428,9 @@ public class DefaultRepositoryProxyConnectors
                 "Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
                 targetRepository.getRepository().getId(), path, e.getMessage() );
             log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
-                "Transfer error from repository \"" + targetRepository.getRepository().getId()
-                + "\" for resource " + path + ", continuing to next repository. Error message: {}",
-                e.getMessage(), e
-            );
+                "Transfer error from repository \"{}"
+                + "\" for resource {}, continuing to next repository. Error message: {}",
+                targetRepository.getRepository().getId(), path, e.getMessage(), e );
         }
         catch ( RepositoryAdminException e )
         {
@@ -830,8 +829,8 @@ public class DefaultRepositoryProxyConnectors
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName()
-                + "']." );
+            log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+                + "'].", localFile.getName() );
         }
     }


@@ -89,8 +89,8 @@ public abstract class AbstractRepositoryAdminTest
         if ( path.contains( SPACE ) )
         {
             LoggerFactory.getLogger( AbstractRepositoryAdminTest.class.getName() ).error(
-                "You are building and testing with {appserver.base}: \n " + path
-                + " containing space. Consider relocating." );
+                "You are building and testing with {appserver.base}: \n {}"
+                + " containing space. Consider relocating.", path );
         }
         return path.replaceAll( SPACE, "&20" );
     }


@@ -77,8 +77,8 @@ public class ConsumerProcessFileClosure
             /* Intentionally Catch all exceptions.
              * So that the discoverer processing can continue.
              */
-            log.error( "Consumer [" + id + "] had an error when processing file ["
-                + basefile.getAbsolutePath() + "]: " + e.getMessage(), e );
+            log.error( "Consumer [{}] had an error when processing file ["
+                + "{}]: {}", id, basefile.getAbsolutePath(), e.getMessage(), e );
         }
     }
 }


@@ -135,7 +135,7 @@ public class ArchivaIndexingTaskExecutor
         }
         catch ( RepositoryAdminException e )
         {
-            log.error( "Error occurred while creating context: " + e.getMessage() );
+            log.error( "Error occurred while creating context: {}", e.getMessage() );
             throw new TaskExecutionException( "Error occurred while creating context: " + e.getMessage(), e );
         }
     }
@@ -227,7 +227,7 @@ public class ArchivaIndexingTaskExecutor
         }
         catch ( IOException e )
         {
-            log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage(),
+            log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage(),
                 e );
             throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
                 e );
@@ -261,7 +261,7 @@ public class ArchivaIndexingTaskExecutor
         }
         catch ( IOException e )
         {
-            log.error( "Error occurred while executing indexing task '" + indexingTask + "': " + e.getMessage() );
+            log.error( "Error occurred while executing indexing task '{}': {}", indexingTask, e.getMessage() );
             throw new TaskExecutionException( "Error occurred while executing indexing task '" + indexingTask + "'",
                 e );
         }


@@ -83,7 +83,7 @@ public class DefaultCommonServices
         }
         catch ( IOException e )
         {
-            log.warn( "skip error loading properties {}", resourceName.toString() );
+            log.warn( "skip error loading properties {}", resourceName );
         }
         return fromProperties( properties );


@@ -72,7 +72,7 @@ public class DefaultPluginsServices
         {
             sb.append( repoType ).append( "|" );
         }
-        log.debug( "getAdminPlugins: {}", sb.toString() );
+        log.debug( "getAdminPlugins: {}", sb );
         if ( sb.length() > 1 )
         {
             adminPlugins = sb.substring( 0, sb.length() - 1 );


@@ -352,7 +352,7 @@ public class DefaultRepositoriesService
         }
         catch ( AuthorizationException e )
         {
-            log.error( "error reading permission: " + e.getMessage(), e );
+            log.error( "error reading permission: {}", e.getMessage(), e );
             throw new ArchivaRestServiceException( e.getMessage(), e );
         }
@@ -370,7 +370,7 @@ public class DefaultRepositoriesService
         }
         catch ( AuthorizationException e )
         {
-            log.error( "error reading permission: " + e.getMessage(), e );
+            log.error( "error reading permission: {}", e.getMessage(), e );
             throw new ArchivaRestServiceException( e.getMessage(), e );
         }
@@ -394,7 +394,7 @@ public class DefaultRepositoriesService
         if ( StringUtils.isEmpty( artifactSourcePath ) )
         {
-            log.error( "cannot find artifact " + artifactTransferRequest.toString() );
+            log.error( "cannot find artifact {}", artifactTransferRequest );
             throw new ArchivaRestServiceException( "cannot find artifact " + artifactTransferRequest.toString(),
                 null );
         }
@@ -403,7 +403,7 @@ public class DefaultRepositoriesService
         if ( !artifactFile.exists() )
         {
-            log.error( "cannot find artifact " + artifactTransferRequest.toString() );
+            log.error( "cannot find artifact {}", artifactTransferRequest );
             throw new ArchivaRestServiceException( "cannot find artifact " + artifactTransferRequest.toString(),
                 null );
         }
@@ -487,17 +487,17 @@ public class DefaultRepositoriesService
         }
         catch ( RepositoryException e )
         {
-            log.error( "RepositoryException: " + e.getMessage(), e );
+            log.error( "RepositoryException: {}", e.getMessage(), e );
             throw new ArchivaRestServiceException( e.getMessage(), e );
         }
         catch ( RepositoryAdminException e )
         {
-            log.error( "RepositoryAdminException: " + e.getMessage(), e );
+            log.error( "RepositoryAdminException: {}", e.getMessage(), e );
             throw new ArchivaRestServiceException( e.getMessage(), e );
         }
         catch ( IOException e )
         {
-            log.error( "IOException: " + e.getMessage(), e );
+            log.error( "IOException: {}", e.getMessage(), e );
             throw new ArchivaRestServiceException( e.getMessage(), e );
         }
         return true;
@@ -517,8 +517,8 @@ public class DefaultRepositoriesService
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName()
-                + "']." );
+            log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+                + "'].", localFile.getName() );
         }
     }


@@ -569,8 +569,8 @@ public class DefaultFileUploadService
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName()
-                + "']." );
+            log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+                + "'].", localFile.getName() );
         }
     }


@@ -73,7 +73,7 @@ public class ArchivaLdapConnectionFactory
         }
         catch ( InvalidNameException e )
         {
-            log.error("Error during initialization of LdapConnectionFactory "+e.getMessage(),e);
+            log.error( "Error during initialization of LdapConnectionFactory {}", e.getMessage(), e );
             // throw new RuntimeException( "Error while initializing connection factory.", e );
         }
         catch ( RepositoryAdminException e )


@@ -89,7 +89,7 @@ public class ArchivaUserManagerAuthenticator
         }
         catch ( RepositoryAdminException e )
        {
-            log.error("Error during repository initialization "+e.getMessage(),e);
+            log.error( "Error during repository initialization {}", e.getMessage(), e );
             // throw new AuthenticationException( e.getMessage(), e );
         }
     }


@@ -152,7 +152,7 @@ public class SecuritySynchronization
         catch ( RoleManagerException e )
         {
             // Log error.
-            log.error( "Unable to create roles for configured repositories: " + e.getMessage(), e );
+            log.error( "Unable to create roles for configured repositories: {}", e.getMessage(), e );
         }
     }


@@ -723,8 +723,8 @@ public class ArchivaDavResource
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName()
-                + "']." );
+            log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
+                + "'].", localFile.getName() );
         }
     }
 }


@@ -100,7 +100,7 @@ public class MimeTypes
         }
         catch ( IOException e )
         {
-            log.error( "Unable to load mime map " + resourceName + " : " + e.getMessage(), e );
+            log.error( "Unable to load mime map {} : {}", resourceName, e.getMessage(), e );
         }
     }
@@ -146,7 +146,7 @@ public class MimeTypes
         }
         catch ( IOException e )
         {
-            log.error( "Unable to read mime types from input stream : " + e.getMessage(), e );
+            log.error( "Unable to read mime types from input stream : {}", e.getMessage(), e );
         }
     }


@@ -134,7 +134,7 @@ public class DefaultAuditManager
         }
         catch ( ParseException e )
         {
-            log.error( "Invalid audit event found in the metadata repository: " + e.getMessage() );
+            log.error( "Invalid audit event found in the metadata repository: {}", e.getMessage() );
             // continue and ignore this one
         }
     }


@@ -458,7 +458,7 @@ public class DefaultCassandraArchivaManager
         { // ensure keyspace exists, here if the keyspace doesn't exist we suppose nothing exist
             if ( cluster.describeKeyspace( keyspaceName ) == null )
             {
-                logger.info( "Creating Archiva Cassandra '" + keyspaceName + "' keyspace." );
+                logger.info( "Creating Archiva Cassandra '{}' keyspace.", keyspaceName );
                 cluster.addKeyspace( HFactory.createKeyspaceDefinition( keyspaceName, //
                     ThriftKsDef.DEF_STRATEGY_CLASS, //
                     replicationFactor, //


@@ -570,7 +570,7 @@ public class FileMetadataRepository
             MetadataFacetFactory factory = metadataFacetFactories.get( facetId );
             if ( factory == null )
             {
-                log.error( "Attempted to load unknown artifact metadata facet: " + facetId );
+                log.error( "Attempted to load unknown artifact metadata facet: {}", facetId );
             }
             else
             {


@@ -1353,7 +1353,7 @@ public class JcrMetadataRepository
             getJcrSession().save();
         } catch ( InvalidItemStateException e ) {
             // olamy this might happen when deleting a repo while is under scanning
-            log.warn( "skip InvalidItemStateException:" + e.getMessage(), e );
+            log.warn( "skip InvalidItemStateException:{}", e.getMessage(), e );
         }
         catch ( RepositoryException e )
         {
@@ -1493,7 +1493,7 @@ public class JcrMetadataRepository
             MetadataFacetFactory factory = metadataFacetFactories.get( name );
             if ( factory == null )
             {
-                log.error( "Attempted to load unknown project version metadata facet: " + name );
+                log.error( "Attempted to load unknown project version metadata facet: {}", name );
             }
             else
             {


@@ -156,7 +156,7 @@ public class DefaultRepositoryStatisticsManager
         }
         catch ( ParseException e )
         {
-            log.error( "Invalid scan result found in the metadata repository: " + e.getMessage() );
+            log.error( "Invalid scan result found in the metadata repository: {}", e.getMessage() );
             // continue and ignore this one
         }
     }