From 7e583a38247c433638e4430a1620f2ee58bcbc97 Mon Sep 17 00:00:00 2001 From: wenxinhe Date: Tue, 18 Jul 2017 19:06:25 +0800 Subject: [PATCH] HADOOP-14539. Move commons logging APIs over to slf4j in hadoop-common. This closes #251 Signed-off-by: Akira Ajisaka --- .../org/apache/hadoop/conf/Configuration.java | 16 +++++++-------- .../hadoop/conf/ReconfigurableBase.java | 7 ++++--- .../hadoop/conf/ReconfigurationServlet.java | 8 ++++---- .../hadoop/crypto/JceAesCtrCryptoCodec.java | 8 ++++---- .../crypto/OpensslAesCtrCryptoCodec.java | 8 ++++---- .../apache/hadoop/crypto/OpensslCipher.java | 8 ++++---- .../crypto/random/OpensslSecureRandom.java | 8 ++++---- .../hadoop/crypto/random/OsSecureRandom.java | 9 +++++---- .../apache/hadoop/fs/AbstractFileSystem.java | 6 +++--- .../java/org/apache/hadoop/fs/ChecksumFs.java | 8 ++++---- .../hadoop/fs/DelegationTokenRenewer.java | 10 +++++----- .../org/apache/hadoop/fs/FSInputChecker.java | 9 +++++---- .../org/apache/hadoop/fs/FileContext.java | 10 +++++----- .../java/org/apache/hadoop/fs/FileUtil.java | 10 +++++----- .../java/org/apache/hadoop/fs/FsShell.java | 6 +++--- .../apache/hadoop/fs/FsShellPermissions.java | 4 ++-- .../java/org/apache/hadoop/fs/Globber.java | 7 ++++--- .../org/apache/hadoop/fs/HarFileSystem.java | 11 +++++----- .../apache/hadoop/fs/LocalDirAllocator.java | 9 +++++---- .../main/java/org/apache/hadoop/fs/Trash.java | 6 +++--- .../apache/hadoop/fs/TrashPolicyDefault.java | 8 ++++---- .../apache/hadoop/fs/ftp/FTPFileSystem.java | 8 ++++---- .../hadoop/fs/permission/FsPermission.java | 6 +++--- .../hadoop/fs/sftp/SFTPConnectionPool.java | 7 ++++--- .../apache/hadoop/fs/sftp/SFTPFileSystem.java | 7 ++++--- .../org/apache/hadoop/fs/shell/Command.java | 6 +++--- .../hadoop/ha/ActiveStandbyElector.java | 15 +++++++------- .../apache/hadoop/ha/FailoverController.java | 10 +++++----- .../java/org/apache/hadoop/ha/HAAdmin.java | 8 ++++---- .../org/apache/hadoop/ha/HealthMonitor.java | 8 ++++---- 
.../java/org/apache/hadoop/ha/NodeFencer.java | 6 +++--- .../apache/hadoop/ha/SshFenceByTcpPort.java | 11 +++------- .../hadoop/ha/ZKFailoverController.java | 20 +++++++++---------- ...ServiceProtocolServerSideTranslatorPB.java | 6 +++--- .../org/apache/hadoop/http/HttpServer.java | 6 +++--- .../org/apache/hadoop/http/HttpServer2.java | 7 ++++--- .../hadoop/http/lib/StaticUserWebFilter.java | 7 ++++--- .../org/apache/hadoop/io/BloomMapFile.java | 6 +++--- .../apache/hadoop/io/FastByteComparisons.java | 7 +++---- .../java/org/apache/hadoop/io/IOUtils.java | 4 ++-- .../java/org/apache/hadoop/io/MapFile.java | 8 ++++---- .../org/apache/hadoop/io/ReadaheadPool.java | 6 +++--- .../org/apache/hadoop/io/SequenceFile.java | 7 ++++--- .../main/java/org/apache/hadoop/io/UTF8.java | 5 +++-- .../apache/hadoop/io/compress/CodecPool.java | 6 +++--- .../io/compress/CompressionCodecFactory.java | 8 ++++---- .../hadoop/io/compress/DefaultCodec.java | 6 +++--- .../io/compress/bzip2/Bzip2Compressor.java | 8 ++++---- .../io/compress/bzip2/Bzip2Decompressor.java | 8 ++++---- .../io/compress/bzip2/Bzip2Factory.java | 6 +++--- .../hadoop/io/compress/lz4/Lz4Compressor.java | 8 ++++---- .../io/compress/lz4/Lz4Decompressor.java | 8 ++++---- .../io/compress/snappy/SnappyCompressor.java | 8 ++++---- .../compress/snappy/SnappyDecompressor.java | 8 ++++---- .../io/compress/zlib/BuiltInZlibDeflater.java | 8 ++++---- .../io/compress/zlib/ZlibCompressor.java | 8 ++++---- .../hadoop/io/compress/zlib/ZlibFactory.java | 7 +++---- .../apache/hadoop/io/file/tfile/BCFile.java | 6 +++--- .../hadoop/io/file/tfile/Compression.java | 6 +++--- .../apache/hadoop/io/file/tfile/TFile.java | 8 ++++---- .../hadoop/io/file/tfile/TFileDumper.java | 8 ++++---- .../apache/hadoop/io/nativeio/NativeIO.java | 16 +++++++-------- .../nativeio/SharedFileDescriptorFactory.java | 7 ++++--- .../apache/hadoop/io/retry/RetryPolicies.java | 6 +++--- .../apache/hadoop/io/retry/RetryUtils.java | 6 +++--- 
.../io/serializer/SerializationFactory.java | 8 ++++---- .../apache/hadoop/ipc/CallQueueManager.java | 7 ++++--- .../java/org/apache/hadoop/ipc/Client.java | 6 +++--- .../org/apache/hadoop/ipc/FairCallQueue.java | 6 +++--- .../apache/hadoop/ipc/ProtobufRpcEngine.java | 7 ++++--- .../main/java/org/apache/hadoop/ipc/RPC.java | 6 +++--- .../apache/hadoop/ipc/RefreshRegistry.java | 7 ++++--- .../org/apache/hadoop/ipc/RetryCache.java | 6 +++--- .../java/org/apache/hadoop/ipc/Server.java | 16 +++++++-------- .../ipc/WeightedRoundRobinMultiplexer.java | 8 ++++---- .../apache/hadoop/ipc/WritableRpcEngine.java | 6 +++--- .../hadoop/ipc/metrics/RetryCacheMetrics.java | 6 +++--- .../ipc/metrics/RpcDetailedMetrics.java | 8 ++++---- .../apache/hadoop/ipc/metrics/RpcMetrics.java | 6 +++--- .../org/apache/hadoop/jmx/JMXJsonServlet.java | 7 ++++--- .../apache/hadoop/metrics/MetricsUtil.java | 7 +++---- .../metrics/ganglia/GangliaContext.java | 8 ++++---- .../metrics/ganglia/GangliaContext31.java | 10 +++++----- .../apache/hadoop/metrics/jvm/JvmMetrics.java | 6 +++--- .../hadoop/metrics/spi/CompositeContext.java | 8 ++++---- .../hadoop/metrics/util/MetricsIntValue.java | 8 ++++---- .../metrics/util/MetricsTimeVaryingInt.java | 8 ++++---- .../metrics/util/MetricsTimeVaryingLong.java | 8 ++++---- .../metrics/util/MetricsTimeVaryingRate.java | 8 ++++---- .../metrics2/impl/MBeanInfoBuilder.java | 2 +- .../hadoop/metrics2/impl/MetricsConfig.java | 8 ++++---- .../metrics2/impl/MetricsSinkAdapter.java | 9 +++++---- .../metrics2/impl/MetricsSourceAdapter.java | 7 ++++--- .../metrics2/impl/MetricsSystemImpl.java | 6 +++--- .../hadoop/metrics2/lib/MethodMetric.java | 7 ++++--- .../metrics2/lib/MetricsSourceBuilder.java | 7 ++++--- .../metrics2/lib/MutableMetricsFactory.java | 7 ++++--- .../hadoop/metrics2/lib/MutableRates.java | 7 +++---- .../lib/MutableRatesWithAggregation.java | 7 ++++--- .../hadoop/metrics2/sink/GraphiteSink.java | 7 ++++--- .../sink/ganglia/AbstractGangliaSink.java | 10 
+++++----- .../metrics2/sink/ganglia/GangliaSink30.java | 6 +++--- .../metrics2/sink/ganglia/GangliaSink31.java | 8 ++++---- .../apache/hadoop/metrics2/util/MBeans.java | 6 +++--- .../hadoop/metrics2/util/MetricsCache.java | 6 +++--- .../main/java/org/apache/hadoop/net/DNS.java | 6 +++--- .../java/org/apache/hadoop/net/NetUtils.java | 6 +++--- .../apache/hadoop/net/ScriptBasedMapping.java | 8 ++++---- .../hadoop/net/SocketIOWithTimeout.java | 6 +++--- .../org/apache/hadoop/net/TableMapping.java | 6 +++--- .../apache/hadoop/net/unix/DomainSocket.java | 10 +++++----- .../hadoop/net/unix/DomainSocketWatcher.java | 12 +++++------ .../security/CompositeGroupsMapping.java | 7 ++++--- .../apache/hadoop/security/Credentials.java | 10 +++++----- .../org/apache/hadoop/security/Groups.java | 7 +++---- .../HttpCrossOriginFilterInitializer.java | 8 ++++---- .../security/JniBasedUnixGroupsMapping.java | 8 ++++---- ...JniBasedUnixGroupsMappingWithFallback.java | 8 ++++---- .../JniBasedUnixGroupsNetgroupMapping.java | 8 ++++---- ...UnixGroupsNetgroupMappingWithFallback.java | 8 ++++---- .../hadoop/security/LdapGroupsMapping.java | 7 ++++--- .../apache/hadoop/security/ProviderUtils.java | 7 ++++--- .../hadoop/security/SaslInputStream.java | 7 ++++--- .../apache/hadoop/security/SaslRpcClient.java | 7 ++++--- .../apache/hadoop/security/SaslRpcServer.java | 6 +++--- .../apache/hadoop/security/SecurityUtil.java | 9 ++++----- .../hadoop/security/ShellBasedIdMapping.java | 8 ++++---- .../ShellBasedUnixGroupsNetgroupMapping.java | 8 ++++---- .../security/WhitelistBasedResolver.java | 7 ++++--- .../alias/AbstractJavaKeyStoreProvider.java | 6 +++--- .../ServiceAuthorizationManager.java | 9 +++++---- .../security/http/CrossOriginFilter.java | 7 ++++--- .../ssl/FileBasedKeyStoresFactory.java | 8 ++++---- .../ssl/ReloadingX509TrustManager.java | 7 ++++--- .../ssl/SslSelectChannelConnectorSecure.java | 8 ++++---- .../apache/hadoop/security/token/Token.java | 6 +++--- 
.../AbstractDelegationTokenSecretManager.java | 8 ++++---- .../hadoop/service/AbstractService.java | 9 +++++---- .../hadoop/service/CompositeService.java | 7 ++++--- .../service/LoggingStateChangeListener.java | 11 +++++----- .../hadoop/service/ServiceOperations.java | 5 +++-- .../tracing/TracerConfigurationManager.java | 8 ++++---- .../hadoop/util/ApplicationClassLoader.java | 10 +++++----- .../apache/hadoop/util/AsyncDiskService.java | 7 ++++--- .../hadoop/util/CombinedIPWhiteList.java | 7 ++++--- .../apache/hadoop/util/FileBasedIPList.java | 11 +++++----- .../java/org/apache/hadoop/util/GSet.java | 6 +++--- .../hadoop/util/GenericOptionsParser.java | 7 ++++--- .../apache/hadoop/util/HostsFileReader.java | 9 +++++---- .../hadoop/util/IntrusiveCollection.java | 7 ++++--- .../apache/hadoop/util/JvmPauseMonitor.java | 6 +++--- .../org/apache/hadoop/util/MachineList.java | 6 +++--- .../apache/hadoop/util/NativeCodeLoader.java | 8 ++++---- .../hadoop/util/NodeHealthScriptRunner.java | 7 ++++--- .../java/org/apache/hadoop/util/Progress.java | 6 +++--- .../hadoop/util/ShutdownHookManager.java | 7 ++++--- .../hadoop/util/ShutdownThreadsHelper.java | 7 ++++--- .../org/apache/hadoop/util/SysInfoLinux.java | 7 +++---- .../apache/hadoop/util/SysInfoWindows.java | 7 ++++--- .../org/apache/hadoop/util/ThreadUtil.java | 7 +++---- .../org/apache/hadoop/util/VersionInfo.java | 8 ++++---- .../util/concurrent/AsyncGetFuture.java | 7 ++++--- .../util/concurrent/ExecutorHelper.java | 8 ++++---- .../HadoopScheduledThreadPoolExecutor.java | 8 ++++---- .../concurrent/HadoopThreadPoolExecutor.java | 8 ++++---- .../org/apache/hadoop/cli/CLITestHelper.java | 9 +++++---- .../hadoop/crypto/CryptoStreamsTestBase.java | 6 +++--- .../apache/hadoop/crypto/TestCryptoCodec.java | 7 ++++--- .../hadoop/fs/FCStatisticsBaseTest.java | 7 ++++--- .../org/apache/hadoop/fs/TestFileContext.java | 7 ++++--- .../org/apache/hadoop/fs/TestFileStatus.java | 8 ++++---- .../org/apache/hadoop/fs/TestFileUtil.java 
| 6 +++--- .../org/apache/hadoop/fs/TestFsShellCopy.java | 6 +++--- .../hadoop/fs/TestFsShellReturnCode.java | 8 ++++---- .../apache/hadoop/fs/TestFsShellTouch.java | 6 +++--- .../apache/hadoop/fs/TestHarFileSystem.java | 7 ++++--- .../fs/contract/AbstractBondedFSContract.java | 8 ++++---- .../fs/loadGenerator/LoadGenerator.java | 10 +++++----- .../ha/ActiveStandbyElectorTestUtil.java | 6 +++--- .../org/apache/hadoop/ha/DummyHAService.java | 7 ++++--- .../org/apache/hadoop/ha/MiniZKFCCluster.java | 7 ++++--- .../org/apache/hadoop/ha/TestHAAdmin.java | 6 +++--- .../apache/hadoop/ha/TestHealthMonitor.java | 6 +++--- .../apache/hadoop/http/TestGlobalFilter.java | 6 +++--- .../apache/hadoop/http/TestHttpServer.java | 6 +++--- .../hadoop/http/TestHttpServerLogs.java | 6 +++--- .../hadoop/http/TestHttpServerWebapps.java | 9 +++++---- .../hadoop/http/TestHttpServerWithSpengo.java | 7 ++++--- .../apache/hadoop/http/TestPathFilter.java | 6 +++--- .../apache/hadoop/http/TestSSLHttpServer.java | 7 ++++--- .../apache/hadoop/http/TestServletFilter.java | 6 +++--- .../hadoop/http/resource/JerseyResource.java | 6 +++--- .../org/apache/hadoop/io/TestArrayFile.java | 7 +++++-- .../hadoop/io/TestDefaultStringifier.java | 8 +++++--- .../apache/hadoop/io/TestSequenceFile.java | 7 ++++--- .../org/apache/hadoop/io/TestSetFile.java | 7 +++---- .../apache/hadoop/io/TestWritableUtils.java | 7 ++++--- .../apache/hadoop/io/compress/TestCodec.java | 9 +++++---- .../compress/TestCompressionStreamReuse.java | 9 +++++---- .../hadoop/io/nativeio/TestNativeIO.java | 10 +++++----- .../TestSharedFileDescriptorFactory.java | 7 ++++--- .../org/apache/hadoop/ipc/TestAsyncIPC.java | 10 +++++----- .../java/org/apache/hadoop/ipc/TestIPC.java | 17 ++++++++-------- .../hadoop/ipc/TestIPCServerResponder.java | 10 +++++----- .../ipc/TestProtoBufRpcServerHandoff.java | 12 +++++------ .../java/org/apache/hadoop/ipc/TestRPC.java | 6 +++--- .../hadoop/ipc/TestRPCCompatibility.java | 8 ++++---- 
.../hadoop/ipc/TestRPCServerShutdown.java | 7 ++++--- .../hadoop/ipc/TestRpcServerHandoff.java | 8 ++++---- .../org/apache/hadoop/ipc/TestSaslRPC.java | 7 +++---- .../org/apache/hadoop/ipc/TestServer.java | 8 ++++---- .../TestWeightedRoundRobinMultiplexer.java | 7 ++++--- .../metrics2/impl/TestGangliaMetrics.java | 7 ++++--- .../metrics2/impl/TestMetricsConfig.java | 7 ++++--- .../metrics2/impl/TestMetricsSystemImpl.java | 11 +++++----- .../hadoop/metrics2/impl/TestSinkQueue.java | 10 ++++++---- .../metrics2/lib/TestMutableMetrics.java | 7 ++++--- .../metrics2/util/TestMetricsCache.java | 8 +++++--- .../apache/hadoop/net/ServerSocketUtil.java | 9 +++++---- .../java/org/apache/hadoop/net/TestDNS.java | 6 +++--- .../org/apache/hadoop/net/TestNetUtils.java | 6 +++--- .../hadoop/net/TestSocketIOWithTimeout.java | 8 +++++--- .../apache/hadoop/net/TestStaticMapping.java | 7 ++++--- .../hadoop/net/unix/TestDomainSocket.java | 2 +- .../net/unix/TestDomainSocketWatcher.java | 15 +++++++------- .../security/TestCompositeGroupMapping.java | 7 ++++--- .../security/TestDoAsEffectiveUser.java | 7 ++++--- .../hadoop/security/TestGroupFallback.java | 7 ++++--- .../hadoop/security/TestGroupsCaching.java | 8 ++++---- .../TestShellBasedUnixGroupsMapping.java | 8 ++++---- .../alias/TestCredentialProviderFactory.java | 7 ++++--- .../authorize/TestAccessControlList.java | 8 ++++---- .../security/authorize/TestProxyUsers.java | 8 ++++---- .../token/delegation/TestDelegationToken.java | 7 ++++--- .../hadoop/service/TestCompositeService.java | 7 ++++--- .../hadoop/service/TestServiceLifecycle.java | 7 ++++--- .../apache/hadoop/test/MetricsAsserts.java | 6 +++--- .../hadoop/test/MultithreadedTestUtil.java | 8 ++++---- .../apache/hadoop/test/TestJUnitSetup.java | 7 ++++--- .../hadoop/util/TestAsyncDiskService.java | 7 ++++--- .../org/apache/hadoop/util/TestClasspath.java | 9 +++++---- .../hadoop/util/TestIdentityHashStore.java | 7 ++++--- .../hadoop/util/TestLightWeightGSet.java | 7 ++++--- 
.../util/TestLightWeightResizableGSet.java | 7 ++++--- .../hadoop/util/TestNativeCodeLoader.java | 6 +++--- .../apache/hadoop/util/TestSignalLogger.java | 11 +++++----- .../org/apache/hadoop/util/TestWinUtils.java | 6 +++--- .../org/apache/hadoop/mount/MountdBase.java | 12 +++++------ .../org/apache/hadoop/nfs/NfsExports.java | 6 +++--- .../apache/hadoop/nfs/nfs3/FileHandle.java | 6 +++--- .../org/apache/hadoop/nfs/nfs3/Nfs3Base.java | 10 +++++----- .../hadoop/oncrpc/RegistrationClient.java | 7 ++++--- .../org/apache/hadoop/oncrpc/RpcCall.java | 8 ++++---- .../org/apache/hadoop/oncrpc/RpcProgram.java | 6 +++--- .../org/apache/hadoop/oncrpc/RpcUtil.java | 11 +++++----- .../hadoop/oncrpc/SimpleTcpClientHandler.java | 7 ++++--- .../apache/hadoop/oncrpc/SimpleTcpServer.java | 7 ++++--- .../apache/hadoop/oncrpc/SimpleUdpServer.java | 7 ++++--- .../hadoop/oncrpc/security/Credentials.java | 6 +++--- .../oncrpc/security/SecurityHandler.java | 7 ++++--- .../org/apache/hadoop/portmap/Portmap.java | 8 ++++---- .../hadoop/portmap/RpcProgramPortmap.java | 7 ++++--- 262 files changed, 1044 insertions(+), 955 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 253c47a9d6d..ceef588a8e7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -77,8 +77,6 @@ import com.google.common.base.Charsets; import org.apache.commons.collections.map.UnmodifiableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -98,6 +96,8 @@ import 
org.codehaus.jackson.JsonGenerator; import org.codehaus.stax2.XMLInputFactory2; import org.codehaus.stax2.XMLStreamReader2; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; @@ -183,11 +183,11 @@ @InterfaceStability.Stable public class Configuration implements Iterable>, Writable { - private static final Log LOG = - LogFactory.getLog(Configuration.class); + private static final Logger LOG = + LoggerFactory.getLogger(Configuration.class); - private static final Log LOG_DEPRECATION = - LogFactory.getLog("org.apache.hadoop.conf.Configuration.deprecation"); + private static final Logger LOG_DEPRECATION = LoggerFactory.getLogger( + "org.apache.hadoop.conf.Configuration.deprecation"); private boolean quietmode = true; @@ -2801,10 +2801,10 @@ private Resource loadResource(Properties properties, } return null; } catch (IOException e) { - LOG.fatal("error parsing conf " + name, e); + LOG.error("error parsing conf " + name, e); throw new RuntimeException(e); } catch (XMLStreamException e) { - LOG.fatal("error parsing conf " + name, e); + LOG.error("error parsing conf " + name, e); throw new RuntimeException(e); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java index bdd006dfa77..146c6d844f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java @@ -22,9 +22,10 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import org.apache.commons.logging.*; import org.apache.hadoop.util.Time; import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
java.io.IOException; import java.util.Collection; @@ -41,8 +42,8 @@ public abstract class ReconfigurableBase extends Configured implements Reconfigurable { - private static final Log LOG = - LogFactory.getLog(ReconfigurableBase.class); + private static final Logger LOG = + LoggerFactory.getLogger(ReconfigurableBase.class); // Use for testing purpose. private ReconfigurationUtil reconfigurationUtil = new ReconfigurationUtil(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java index bb221ee361f..5a616f72b91 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java @@ -18,8 +18,6 @@ package org.apache.hadoop.conf; -import org.apache.commons.logging.*; - import org.apache.commons.lang.StringEscapeUtils; import java.util.Collection; @@ -33,6 +31,8 @@ import javax.servlet.http.HttpServletResponse; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A servlet for changing a node's configuration. 
@@ -45,8 +45,8 @@ public class ReconfigurationServlet extends HttpServlet { private static final long serialVersionUID = 1L; - private static final Log LOG = - LogFactory.getLog(ReconfigurationServlet.class); + private static final Logger LOG = + LoggerFactory.getLogger(ReconfigurationServlet.class); // the prefix used to fing the attribute holding the reconfigurable // for a given request diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java index 61ee743c421..de0e5dd6268 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/JceAesCtrCryptoCodec.java @@ -26,12 +26,12 @@ import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY; @@ -42,8 +42,8 @@ */ @InterfaceAudience.Private public class JceAesCtrCryptoCodec extends AesCtrCryptoCodec { - private static final Log LOG = - LogFactory.getLog(JceAesCtrCryptoCodec.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(JceAesCtrCryptoCodec.class.getName()); private Configuration conf; private String provider; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java index d08e58882ca..8d01f42095e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslAesCtrCryptoCodec.java @@ -26,22 +26,22 @@ import java.security.SecureRandom; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import com.google.common.base.Preconditions; import org.apache.hadoop.crypto.random.OsSecureRandom; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implement the AES-CTR crypto codec using JNI into OpenSSL. */ @InterfaceAudience.Private public class OpensslAesCtrCryptoCodec extends AesCtrCryptoCodec { - private static final Log LOG = - LogFactory.getLog(OpensslAesCtrCryptoCodec.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslAesCtrCryptoCodec.class.getName()); private Configuration conf; private Random random; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java index 264652b202a..286f9ac7dd1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java @@ -26,12 +26,12 @@ import javax.crypto.NoSuchPaddingException; import javax.crypto.ShortBufferException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.NativeCodeLoader; 
import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OpenSSL cipher using JNI. @@ -40,8 +40,8 @@ */ @InterfaceAudience.Private public final class OpensslCipher { - private static final Log LOG = - LogFactory.getLog(OpensslCipher.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslCipher.class.getName()); public static final int ENCRYPT_MODE = 1; public static final int DECRYPT_MODE = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java index b1fa9883373..519595a8d00 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OpensslSecureRandom.java @@ -19,12 +19,12 @@ import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * OpenSSL secure random using JNI. 
@@ -43,8 +43,8 @@ @InterfaceAudience.Private public class OpensslSecureRandom extends Random { private static final long serialVersionUID = -7828193502768789584L; - private static final Log LOG = - LogFactory.getLog(OpensslSecureRandom.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(OpensslSecureRandom.class.getName()); /** If native SecureRandom unavailable, use java SecureRandom */ private java.security.SecureRandom fallback = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java index 9428b981751..66715916f56 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java @@ -23,12 +23,12 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT; @@ -39,7 +39,8 @@ */ @InterfaceAudience.Private public class OsSecureRandom extends Random implements Closeable, Configurable { - public static final Log LOG = LogFactory.getLog(OsSecureRandom.class); + public static final Logger LOG = + LoggerFactory.getLogger(OsSecureRandom.class); private static final long serialVersionUID = 6391500337172057900L; @@ -112,7 +113,7 @@ synchronized protected int next(int 
nbits) { @Override synchronized public void close() { if (stream != null) { - IOUtils.cleanup(LOG, stream); + IOUtils.cleanupWithLogger(LOG, stream); stream = null; } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java index ef684372744..9bea8f91371 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java @@ -32,8 +32,6 @@ import java.util.StringTokenizer; import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -52,6 +50,8 @@ import org.apache.hadoop.util.Progressable; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides an interface for implementors of a Hadoop file system @@ -66,7 +66,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public abstract class AbstractFileSystem { - static final Log LOG = LogFactory.getLog(AbstractFileSystem.class); + static final Logger LOG = LoggerFactory.getLogger(AbstractFileSystem.class); /** Recording statistics per a file system class. 
*/ private static final Map diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java index 384b32cbe0a..b536b27439e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java @@ -27,14 +27,14 @@ import java.util.Arrays; import java.util.EnumSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstract Checksumed Fs. @@ -110,8 +110,8 @@ private int getSumBufferSize(int bytesPerSum, int bufferSize, Path file) * It verifies that data matches checksums. 
*******************************************************/ private static class ChecksumFSInputChecker extends FSInputChecker { - public static final Log LOG - = LogFactory.getLog(FSInputChecker.class); + public static final Logger LOG = + LoggerFactory.getLogger(FSInputChecker.class); private static final int HEADER_LENGTH = 8; private ChecksumFs fs; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java index 3542a9b585e..09c3a8ad3d3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java @@ -26,12 +26,12 @@ import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A daemon thread that waits for the next file system to renew. @@ -39,8 +39,8 @@ @InterfaceAudience.Private public class DelegationTokenRenewer extends Thread { - private static final Log LOG = LogFactory - .getLog(DelegationTokenRenewer.class); + private static final Logger LOG = LoggerFactory + .getLogger(DelegationTokenRenewer.class); /** The renewable interface used by the renewer. 
*/ public interface Renewable { @@ -243,7 +243,7 @@ public void removeRenewAction( LOG.error("Interrupted while canceling token for " + fs.getUri() + "filesystem"); if (LOG.isDebugEnabled()) { - LOG.debug(ie.getStackTrace()); + LOG.debug("Exception in removeRenewAction: ", ie); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java index 85056794204..5f8291d612b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java @@ -22,11 +22,12 @@ import java.io.InputStream; import java.util.zip.Checksum; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.nio.ByteBuffer; import java.nio.IntBuffer; @@ -37,8 +38,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS"}) @InterfaceStability.Unstable abstract public class FSInputChecker extends FSInputStream { - public static final Log LOG - = LogFactory.getLog(FSInputChecker.class); + public static final Logger LOG = + LoggerFactory.getLogger(FSInputChecker.class); /** The file name from which data is read from */ protected Path file; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java index b2f0f472f51..341d11f75bb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java @@ -35,8 +35,6 @@ import 
java.util.TreeSet; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -62,6 +60,8 @@ import com.google.common.base.Preconditions; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The FileContext class provides an interface to the application writer for @@ -178,7 +178,7 @@ @InterfaceStability.Stable public class FileContext { - public static final Log LOG = LogFactory.getLog(FileContext.class); + public static final Logger LOG = LoggerFactory.getLogger(FileContext.class); /** * Default permission for directory and symlink * In previous versions, this default permission was also used to @@ -343,7 +343,7 @@ public AbstractFileSystem run() throws UnsupportedFileSystemException { } }); } catch (InterruptedException ex) { - LOG.error(ex); + LOG.error(ex.toString()); throw new IOException("Failed to get the AbstractFileSystem for path: " + uri, ex); } @@ -457,7 +457,7 @@ public static FileContext getFileContext(final URI defaultFsUri, } catch (UnsupportedFileSystemException ex) { throw ex; } catch (IOException ex) { - LOG.error(ex); + LOG.error(ex.toString()); throw new RuntimeException(ex); } return getFileContext(defaultAfs, aConf); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 155f0be9c29..f279e5eb2e7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -45,8 +45,6 @@ import org.apache.commons.collections.map.CaseInsensitiveMap; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; 
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -57,6 +55,8 @@ import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of file-processing util methods @@ -65,7 +65,7 @@ @InterfaceStability.Evolving public class FileUtil { - private static final Log LOG = LogFactory.getLog(FileUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class); /* The error code is defined in winutils to indicate insufficient * privilege to create symbolic links. This value need to keep in @@ -732,7 +732,7 @@ private static void unTarUsingJava(File inFile, File untarDir, entry = tis.getNextTarEntry(); } } finally { - IOUtils.cleanup(LOG, tis, inputStream); + IOUtils.cleanupWithLogger(LOG, tis, inputStream); } } @@ -1317,7 +1317,7 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd, bos = new BufferedOutputStream(fos); jos = new JarOutputStream(bos, jarManifest); } finally { - IOUtils.cleanup(LOG, jos, bos, fos); + IOUtils.cleanupWithLogger(LOG, jos, bos, fos); } String[] jarCp = {classPathJar.getCanonicalPath(), unexpandedWildcardClasspath.toString()}; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java index 97b65f23b6e..eccfbfc6219 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java @@ -24,8 +24,6 @@ import java.util.LinkedList; import 
org.apache.commons.lang.WordUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; @@ -39,12 +37,14 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Provide command line access to a FileSystem. */ @InterfaceAudience.Private public class FsShell extends Configured implements Tool { - static final Log LOG = LogFactory.getLog(FsShell.class); + static final Logger LOG = LoggerFactory.getLogger(FsShell.class); private static final int MAX_LINE_WIDTH = 80; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java index 0a829298ca4..76e379c51f6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShellPermissions.java @@ -22,7 +22,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.permission.ChmodParser; @@ -32,6 +31,7 @@ import org.apache.hadoop.fs.shell.FsCommand; import org.apache.hadoop.fs.shell.PathData; import org.apache.hadoop.util.Shell; +import org.slf4j.Logger; /** * This class is the home for file permissions related commands. 
@@ -41,7 +41,7 @@ @InterfaceStability.Unstable public class FsShellPermissions extends FsCommand { - static Log LOG = FsShell.LOG; + static final Logger LOG = FsShell.LOG; /** * Register the permission related commands with the factory diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java index 7c69167c3a1..ca3db1d98eb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java @@ -23,18 +23,19 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Unstable class Globber { - public static final Log LOG = LogFactory.getLog(Globber.class.getName()); + public static final Logger LOG = + LoggerFactory.getLogger(Globber.class.getName()); private final FileSystem fs; private final FileContext fc; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java index b5d83f2f9cf..aa58706ed11 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.LineReader; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.EOFException; import java.io.FileNotFoundException; @@ -50,7 +50,8 @@ public class HarFileSystem extends FileSystem { - private static final Log LOG = LogFactory.getLog(HarFileSystem.class); + private static final Logger LOG = + LoggerFactory.getLogger(HarFileSystem.class); public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries"; public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10; @@ -1173,7 +1174,7 @@ private void parseMetaData() throws IOException { LOG.warn("Encountered exception ", ioe); throw ioe; } finally { - IOUtils.cleanup(LOG, lin, in); + IOUtils.cleanupWithLogger(LOG, lin, in); } FSDataInputStream aIn = fs.open(archiveIndexPath); @@ -1198,7 +1199,7 @@ private void parseMetaData() throws IOException { } } } finally { - IOUtils.cleanup(LOG, aIn); + IOUtils.cleanupWithLogger(LOG, aIn); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java index 1ed01ea07ff..c1e9d21ecc7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java @@ -23,14 +23,15 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.*; import org.apache.hadoop.util.*; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An implementation of a round-robin scheme for disk allocation for creating * files. The way it works is that it is kept track what disk was last @@ -245,8 +246,8 @@ int getCurrentDirectoryIndex() { private static class AllocatorPerContext { - private final Log LOG = - LogFactory.getLog(AllocatorPerContext.class); + private static final Logger LOG = + LoggerFactory.getLogger(AllocatorPerContext.class); private Random dirIndexRandomizer = new Random(); private String contextCfgItemName; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java index b7718121ee8..35cd80483d2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java @@ -19,11 +19,12 @@ import java.io.IOException; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Provides a trash facility which supports pluggable Trash policies. 
@@ -34,8 +35,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class Trash extends Configured { - private static final org.apache.commons.logging.Log LOG = - LogFactory.getLog(Trash.class); + private static final Logger LOG = LoggerFactory.getLogger(Trash.class); private TrashPolicy trashPolicy; // configured trash policy instance diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java index c65e16ae5fb..265e967b01e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java @@ -30,8 +30,6 @@ import java.util.Collection; import java.util.Date; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -41,6 +39,8 @@ import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Provides a trash feature. Files are moved to a user's trash * directory, a subdirectory of their home directory named ".Trash". 
Files are @@ -54,8 +54,8 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class TrashPolicyDefault extends TrashPolicy { - private static final Log LOG = - LogFactory.getLog(TrashPolicyDefault.class); + private static final Logger LOG = + LoggerFactory.getLogger(TrashPolicyDefault.class); private static final Path CURRENT = new Path("Current"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java index 3259463cf54..e0ef42f8ced 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ftp/FTPFileSystem.java @@ -25,8 +25,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.ftp.FTP; import org.apache.commons.net.ftp.FTPClient; import org.apache.commons.net.ftp.FTPFile; @@ -45,6 +43,8 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -56,8 +56,8 @@ @InterfaceStability.Stable public class FTPFileSystem extends FileSystem { - public static final Log LOG = LogFactory - .getLog(FTPFileSystem.class); + public static final Logger LOG = LoggerFactory + .getLogger(FTPFileSystem.class); public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java index 379e41f6903..7d366485bbc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java @@ -21,8 +21,6 @@ import java.io.DataOutput; import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -30,6 +28,8 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableFactories; import org.apache.hadoop.io.WritableFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class for file/directory permissions. 
@@ -37,7 +37,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class FsPermission implements Writable { - private static final Log LOG = LogFactory.getLog(FsPermission.class); + private static final Logger LOG = LoggerFactory.getLogger(FsPermission.class); static final WritableFactory FACTORY = new WritableFactory() { @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java index c7fae7bd5f6..de86bab6d33 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPConnectionPool.java @@ -23,19 +23,20 @@ import java.util.Iterator; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.StringUtils; import com.jcraft.jsch.ChannelSftp; import com.jcraft.jsch.JSch; import com.jcraft.jsch.JSchException; import com.jcraft.jsch.Session; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Concurrent/Multiple Connections. */ class SFTPConnectionPool { - public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(SFTPFileSystem.class); // Maximum number of allowed live connections. This doesn't mean we cannot // have more live connections. 
It means that when we have more // live connections than this threshold, any unused connection will be diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java index e181ced0175..61cdfc4c64e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/sftp/SFTPFileSystem.java @@ -26,8 +26,6 @@ import java.util.ArrayList; import java.util.Vector; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -41,11 +39,14 @@ import com.jcraft.jsch.ChannelSftp.LsEntry; import com.jcraft.jsch.SftpATTRS; import com.jcraft.jsch.SftpException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** SFTP FileSystem. 
*/ public class SFTPFileSystem extends FileSystem { - public static final Log LOG = LogFactory.getLog(SFTPFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(SFTPFileSystem.class); private SFTPConnectionPool connectionPool; private URI uri; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java index cda26e835ab..3b26acffe31 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java @@ -26,8 +26,6 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -35,6 +33,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathNotFoundException; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An abstract class for the execution of a file system command @@ -58,7 +58,7 @@ abstract public class Command extends Configured { private int depth = 0; protected ArrayList exceptions = new ArrayList(); - private static final Log LOG = LogFactory.getLog(Command.class); + private static final Logger LOG = LoggerFactory.getLogger(Command.class); /** allows stdout to be captured if necessary */ public PrintStream out = System.out; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java index 1ce116e3955..b5cac25739d 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java @@ -26,8 +26,6 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -47,6 +45,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -141,7 +141,8 @@ public interface ActiveStandbyElectorCallback { @VisibleForTesting protected static final String BREADCRUMB_FILENAME = "ActiveBreadCrumb"; - public static final Log LOG = LogFactory.getLog(ActiveStandbyElector.class); + public static final Logger LOG = + LoggerFactory.getLogger(ActiveStandbyElector.class); private static final int SLEEP_AFTER_FAILURE_TO_BECOME_ACTIVE = 1000; @@ -712,7 +713,7 @@ protected ZooKeeper createZooKeeper() throws IOException { } private void fatalError(String errorMessage) { - LOG.fatal(errorMessage); + LOG.error(errorMessage); reset(); appClient.notifyFatalError(errorMessage); } @@ -824,10 +825,10 @@ private boolean reEstablishSession() { createConnection(); success = true; } catch(IOException e) { - LOG.warn(e); + LOG.warn(e.toString()); sleepFor(5000); } catch(KeeperException e) { - LOG.warn(e); + LOG.warn(e.toString()); sleepFor(5000); } ++connectionRetryCount; @@ -866,7 +867,7 @@ public synchronized void terminateConnection() { try { tempZk.close(); } catch(InterruptedException e) { - LOG.warn(e); + LOG.warn(e.toString()); } zkConnectionState = ConnectionState.TERMINATED; wantToBeInElection = false; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java index d952e293819..3c05a259571 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/FailoverController.java @@ -19,9 +19,6 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -32,6 +29,8 @@ import org.apache.hadoop.ipc.RPC; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The FailOverController is responsible for electing an active service @@ -43,7 +42,8 @@ @InterfaceStability.Evolving public class FailoverController { - private static final Log LOG = LogFactory.getLog(FailoverController.class); + private static final Logger LOG = + LoggerFactory.getLogger(FailoverController.class); private final int gracefulFenceTimeout; private final int rpcTimeoutToNewActive; @@ -252,7 +252,7 @@ public void failover(HAServiceTarget fromSvc, } catch (FailoverFailedException ffe) { msg += ". 
Failback to " + fromSvc + " failed (" + ffe.getMessage() + ")"; - LOG.fatal(msg); + LOG.error(msg); } } throw new FailoverFailedException(msg, cause); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java index 5eff14c1081..9b7d7ba5d1a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java @@ -28,8 +28,6 @@ import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -43,6 +41,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A command-line tool for making calls in the HAServiceProtocol. @@ -62,7 +62,7 @@ public abstract class HAAdmin extends Configured implements Tool { * operation, which is why it is not documented in the usage below. 
*/ private static final String FORCEMANUAL = "forcemanual"; - private static final Log LOG = LogFactory.getLog(HAAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(HAAdmin.class); private int rpcTimeoutForChecks = -1; @@ -449,7 +449,7 @@ protected int runCmd(String[] argv) throws Exception { if (cmdLine.hasOption(FORCEMANUAL)) { if (!confirmForceManual()) { - LOG.fatal("Aborted"); + LOG.error("Aborted"); return -1; } // Instruct the NNs to honor this request even if they're diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java index 24c149c4583..a93df756496 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java @@ -23,8 +23,6 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import static org.apache.hadoop.fs.CommonConfigurationKeys.*; import org.apache.hadoop.ha.HAServiceProtocol; @@ -35,6 +33,8 @@ import org.apache.hadoop.util.Daemon; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is a daemon which runs in a loop, periodically heartbeating @@ -47,7 +47,7 @@ */ @InterfaceAudience.Private public class HealthMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( HealthMonitor.class); private Daemon daemon; @@ -283,7 +283,7 @@ private MonitorDaemon() { setUncaughtExceptionHandler(new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread t, Throwable e) { - LOG.fatal("Health monitor failed", e); + LOG.error("Health monitor failed", e); 
enterState(HealthMonitor.State.HEALTH_MONITOR_FAILED); } }); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java index 4898b38726f..63f6db68e2d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/NodeFencer.java @@ -22,8 +22,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -31,6 +29,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class parses the configured list of fencing methods, and @@ -61,7 +61,7 @@ public class NodeFencer { private static final Pattern HASH_COMMENT_RE = Pattern.compile("#.*$"); - private static final Log LOG = LogFactory.getLog(NodeFencer.class); + private static final Logger LOG = LoggerFactory.getLogger(NodeFencer.class); /** * Standard fencing methods included with Hadoop. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java index 64cd5a894c4..9ae113b0ea6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java @@ -23,8 +23,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configured; import com.google.common.annotations.VisibleForTesting; @@ -272,7 +270,7 @@ private int parseConfiggedPort(String portStr) * Adapter from JSch's logger interface to our log4j */ private static class LogAdapter implements com.jcraft.jsch.Logger { - static final Log LOG = LogFactory.getLog( + static final Logger LOG = LoggerFactory.getLogger( SshFenceByTcpPort.class.getName() + ".jsch"); @Override @@ -285,9 +283,8 @@ public boolean isEnabled(int level) { case com.jcraft.jsch.Logger.WARN: return LOG.isWarnEnabled(); case com.jcraft.jsch.Logger.ERROR: - return LOG.isErrorEnabled(); case com.jcraft.jsch.Logger.FATAL: - return LOG.isFatalEnabled(); + return LOG.isErrorEnabled(); default: return false; } @@ -306,10 +303,8 @@ public void log(int level, String message) { LOG.warn(message); break; case com.jcraft.jsch.Logger.ERROR: - LOG.error(message); - break; case com.jcraft.jsch.Logger.FATAL: - LOG.fatal(message); + LOG.error(message); break; default: break; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java index e33f0127045..7ada04a6ac9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java @@ -27,8 +27,6 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -56,11 +54,13 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.LimitedPrivate("HDFS") public abstract class ZKFailoverController { - static final Log LOG = LogFactory.getLog(ZKFailoverController.class); + static final Logger LOG = LoggerFactory.getLogger(ZKFailoverController.class); public static final String ZK_QUORUM_KEY = "ha.zookeeper.quorum"; private static final String ZK_SESSION_TIMEOUT_KEY = "ha.zookeeper.session-timeout.ms"; @@ -161,7 +161,7 @@ public HAServiceTarget getLocalTarget() { public int run(final String[] args) throws Exception { if (!localTarget.isAutoFailoverEnabled()) { - LOG.fatal("Automatic failover is not enabled for " + localTarget + "." + + LOG.error("Automatic failover is not enabled for " + localTarget + "." + " Please ensure that automatic failover is enabled in the " + "configuration before running the ZK failover controller."); return ERR_CODE_AUTO_FAILOVER_NOT_ENABLED; @@ -183,7 +183,7 @@ public Integer run() { } }); } catch (RuntimeException rte) { - LOG.fatal("The failover controller encounters runtime error: " + rte); + LOG.error("The failover controller encounters runtime error: " + rte); throw (Exception)rte.getCause(); } } @@ -194,7 +194,7 @@ private int doRun(String[] args) try { initZK(); } catch (KeeperException ke) { - LOG.fatal("Unable to start failover controller. 
Unable to connect " + LOG.error("Unable to start failover controller. Unable to connect " + "to ZooKeeper quorum at " + zkQuorum + ". Please check the " + "configured value for " + ZK_QUORUM_KEY + " and ensure that " + "ZooKeeper is running."); @@ -220,7 +220,7 @@ private int doRun(String[] args) } if (!elector.parentZNodeExists()) { - LOG.fatal("Unable to start failover controller. " + LOG.error("Unable to start failover controller. " + "Parent znode does not exist.\n" + "Run with -formatZK flag to initialize ZooKeeper."); return ERR_CODE_NO_PARENT_ZNODE; @@ -229,7 +229,7 @@ private int doRun(String[] args) try { localTarget.checkFencingConfigured(); } catch (BadFencingConfigurationException e) { - LOG.fatal("Fencing is not configured for " + localTarget + ".\n" + + LOG.error("Fencing is not configured for " + localTarget + ".\n" + "You must configure a fencing method before using automatic " + "failover.", e); return ERR_CODE_NO_FENCER; @@ -375,7 +375,7 @@ private synchronized void mainLoop() throws InterruptedException { } private synchronized void fatalError(String err) { - LOG.fatal("Fatal error occurred:" + err); + LOG.error("Fatal error occurred:" + err); fatalError = err; notifyAll(); } @@ -394,7 +394,7 @@ private synchronized void becomeActive() throws ServiceFailedException { } catch (Throwable t) { String msg = "Couldn't make " + localTarget + " active"; - LOG.fatal(msg, t); + LOG.error(msg, t); recordActiveAttempt(new ActiveAttemptRecord(false, msg + "\n" + StringUtils.stringifyException(t))); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java index 63bfbcafdfa..7f755825e97 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolServerSideTranslatorPB.java @@ -19,8 +19,6 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.ha.HAServiceProtocol; @@ -42,6 +40,8 @@ import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is used on the server side. Calls come across the wire for the @@ -61,7 +61,7 @@ public class HAServiceProtocolServerSideTranslatorPB implements TransitionToActiveResponseProto.newBuilder().build(); private static final TransitionToStandbyResponseProto TRANSITION_TO_STANDBY_RESP = TransitionToStandbyResponseProto.newBuilder().build(); - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( HAServiceProtocolServerSideTranslatorPB.class); public HAServiceProtocolServerSideTranslatorPB(HAServiceProtocol server) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java index e6008a5fd66..d517fefe634 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java @@ -44,8 +44,6 @@ import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.ConfServlet; @@ -82,6 +80,8 @@ import 
org.mortbay.util.MultiException; import com.sun.jersey.spi.container.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Create a Jetty embedded server to answer http requests. The primary goal @@ -100,7 +100,7 @@ @InterfaceStability.Evolving @Deprecated public class HttpServer implements FilterContainer { - public static final Log LOG = LogFactory.getLog(HttpServer.class); + public static final Logger LOG = LoggerFactory.getLogger(HttpServer.class); static final String FILTER_INITIALIZER_PROPERTY = "hadoop.http.filter.initializers"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index de68ecb552d..861fa485b97 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -46,8 +46,6 @@ import javax.servlet.http.HttpServletResponse; import com.google.common.collect.ImmutableMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -96,6 +94,9 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.sun.jersey.spi.container.servlet.ServletContainer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER; import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; @@ -112,7 +113,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public final class HttpServer2 implements FilterContainer { - public static final Log LOG = LogFactory.getLog(HttpServer2.class); + public 
static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static final String FILTER_INITIALIZER_PROPERTY = "hadoop.http.filter.initializers"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java index 9ca5b927df4..fc64697bb8c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/lib/StaticUserWebFilter.java @@ -29,11 +29,11 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.Filter; @@ -47,7 +47,8 @@ public class StaticUserWebFilter extends FilterInitializer { static final String DEPRECATED_UGI_KEY = "dfs.web.ugi"; - private static final Log LOG = LogFactory.getLog(StaticUserWebFilter.class); + private static final Logger LOG = + LoggerFactory.getLogger(StaticUserWebFilter.class); static class User implements Principal { private final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java index d4514c65bd4..519fcd74cbb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java @@ -22,8 +22,6 @@ import java.io.DataOutputStream; import java.io.IOException; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.util.bloom.Filter; import org.apache.hadoop.util.bloom.Key; import org.apache.hadoop.util.hash.Hash; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAPFILE_BLOOM_ERROR_RATE_KEY; @@ -52,7 +52,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class BloomMapFile { - private static final Log LOG = LogFactory.getLog(BloomMapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(BloomMapFile.class); public static final String BLOOM_FILE_NAME = "bloom"; public static final int HASH_COUNT = 5; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java index a3fea3115cb..d5ab4d22c23 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FastByteComparisons.java @@ -22,11 +22,10 @@ import java.security.AccessController; import java.security.PrivilegedAction; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import com.google.common.primitives.Longs; import com.google.common.primitives.UnsignedBytes; @@ -36,7 +35,7 @@ * class to be able to compare arrays that start at non-zero offsets. 
*/ abstract class FastByteComparisons { - static final Log LOG = LogFactory.getLog(FastByteComparisons.class); + static final Logger LOG = LoggerFactory.getLogger(FastByteComparisons.class); /** * Lexicographically compare two byte arrays. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java index b639edc1c5d..6142cd6f223 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java @@ -32,13 +32,13 @@ import java.util.List; import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Shell; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -49,7 +49,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class IOUtils { - public static final Log LOG = LogFactory.getLog(IOUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(IOUtils.class); /** * Copies from one stream to another. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java index 5ba506a178b..fde1c869788 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -37,6 +35,8 @@ import org.apache.hadoop.util.Options; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_MAP_INDEX_SKIP_KEY; @@ -60,7 +60,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class MapFile { - private static final Log LOG = LogFactory.getLog(MapFile.class); + private static final Logger LOG = LoggerFactory.getLogger(MapFile.class); /** The name of the index file. 
*/ public static final String INDEX_FILE_NAME = "index"; @@ -1002,7 +1002,7 @@ public static void main(String[] args) throws Exception { while (reader.next(key, value)) // copy all entries writer.append(key, value); } finally { - IOUtils.cleanup(LOG, writer, reader); + IOUtils.cleanupWithLogger(LOG, writer, reader); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java index a8c06902b11..2e65f12cc09 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java @@ -23,8 +23,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.nativeio.NativeIO; @@ -33,6 +31,8 @@ import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Manages a pool of threads which can issue readahead requests on file descriptors. 
@@ -40,7 +40,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class ReadaheadPool { - static final Log LOG = LogFactory.getLog(ReadaheadPool.class); + static final Logger LOG = LoggerFactory.getLogger(ReadaheadPool.class); private static final int POOL_SIZE = 4; private static final int MAX_POOL_SIZE = 16; private static final int CAPACITY = 1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 16ee8743e1a..253aceaf93e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -24,7 +24,6 @@ import java.rmi.server.UID; import java.security.MessageDigest; -import org.apache.commons.logging.*; import org.apache.hadoop.util.Options; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.Options.CreateOpts; @@ -50,6 +49,8 @@ import org.apache.hadoop.util.MergeSort; import org.apache.hadoop.util.PriorityQueue; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -202,7 +203,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class SequenceFile { - private static final Log LOG = LogFactory.getLog(SequenceFile.class); + private static final Logger LOG = LoggerFactory.getLogger(SequenceFile.class); private SequenceFile() {} // no public ctor @@ -1893,7 +1894,7 @@ private void initialize(Path filename, FSDataInputStream in, succeeded = true; } finally { if (!succeeded) { - IOUtils.cleanup(LOG, this.in); + IOUtils.cleanupWithLogger(LOG, this.in); } } } diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java index 89f1e428bb3..f5d33a13005 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java @@ -25,9 +25,10 @@ import org.apache.hadoop.util.StringUtils; -import org.apache.commons.logging.*; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A WritableComparable for strings that uses the UTF8 encoding. * @@ -42,7 +43,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Stable public class UTF8 implements WritableComparable { - private static final Log LOG= LogFactory.getLog(UTF8.class); + private static final Logger LOG= LoggerFactory.getLogger(UTF8.class); private static final DataInputBuffer IBUF = new DataInputBuffer(); private static final ThreadLocal OBUF_FACTORY = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java index 01bffa78a1d..f103aad4e85 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -33,6 +31,8 @@ import com.google.common.cache.CacheBuilder; import 
com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A global compressor/decompressor pool used to save and reuse @@ -41,7 +41,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class CodecPool { - private static final Log LOG = LogFactory.getLog(CodecPool.class); + private static final Logger LOG = LoggerFactory.getLogger(CodecPool.class); /** * A global compressor pool used to save the expensive diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index 8fff75d01d3..3701f2026af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -19,8 +19,6 @@ import java.util.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -28,6 +26,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory that will find the correct codec for a given filename. 
@@ -36,8 +36,8 @@ @InterfaceStability.Evolving public class CompressionCodecFactory { - public static final Log LOG = - LogFactory.getLog(CompressionCodecFactory.class.getName()); + public static final Logger LOG = + LoggerFactory.getLogger(CompressionCodecFactory.class.getName()); private static final ServiceLoader CODEC_PROVIDERS = ServiceLoader.load(CompressionCodec.class); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java index 31196cc7288..33f39ef9297 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DefaultCodec.java @@ -22,14 +22,14 @@ import java.io.InputStream; import java.io.OutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.zlib.ZlibDecompressor; import org.apache.hadoop.io.compress.zlib.ZlibFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; @@ -37,7 +37,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class DefaultCodec implements Configurable, CompressionCodec, DirectDecompressionCodec { - private static final Log LOG = LogFactory.getLog(DefaultCodec.class); + private static final Logger LOG = LoggerFactory.getLogger(DefaultCodec.class); Configuration conf; diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java index a973dc93340..d4a9787a4ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java @@ -24,9 +24,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -42,7 +41,8 @@ public class Bzip2Compressor implements Compressor { static final int DEFAULT_BLOCK_SIZE = 9; static final int DEFAULT_WORK_FACTOR = 30; - private static final Log LOG = LogFactory.getLog(Bzip2Compressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Compressor.class); private long stream; private int blockSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java index 3135165e879..96693ad30d4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java @@ -23,9 +23,8 @@ import java.nio.ByteBuffer; import org.apache.hadoop.io.compress.Decompressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the popular @@ -36,7 +35,8 @@ public class Bzip2Decompressor 
implements Decompressor { private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; - private static final Log LOG = LogFactory.getLog(Bzip2Decompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(Bzip2Decompressor.class); private long stream; private boolean conserveMemory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java index 0bbcc364a29..7ddae7771e8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Factory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.compress.bzip2; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.util.NativeCodeLoader; @@ -30,6 +28,8 @@ import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor; import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor; import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -37,7 +37,7 @@ * */ public class Bzip2Factory { - private static final Log LOG = LogFactory.getLog(Bzip2Factory.class); + private static final Logger LOG = LoggerFactory.getLogger(Bzip2Factory.class); private static String bzip2LibraryName = ""; private static boolean nativeBzip2Loaded; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java index ccfae8b3c36..3792c365b4d 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Compressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the lz4 compression algorithm. * http://code.google.com/p/lz4/ */ public class Lz4Compressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Compressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java index 685956cc1bf..f26ae8481c3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java @@ -22,18 +22,18 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the lz4 compression algorithm. 
* http://code.google.com/p/lz4/ */ public class Lz4Decompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(Lz4Compressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(Lz4Decompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java index 814718d99ef..3d386800e4d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyCompressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the snappy compression algorithm. 
* http://code.google.com/p/snappy/ */ public class SnappyCompressor implements Compressor { - private static final Log LOG = - LogFactory.getLog(SnappyCompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyCompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java index 8712431673e..f31b76c347c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java @@ -22,19 +22,19 @@ import java.nio.Buffer; import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DirectDecompressor; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Decompressor} based on the snappy compression algorithm. 
* http://code.google.com/p/snappy/ */ public class SnappyDecompressor implements Decompressor { - private static final Log LOG = - LogFactory.getLog(SnappyDecompressor.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SnappyDecompressor.class.getName()); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64 * 1024; private int directBufferSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java index 509456e8347..739788fa5f5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInZlibDeflater.java @@ -23,9 +23,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A wrapper around java.util.zip.Deflater to make it conform @@ -34,7 +33,8 @@ */ public class BuiltInZlibDeflater extends Deflater implements Compressor { - private static final Log LOG = LogFactory.getLog(BuiltInZlibDeflater.class); + private static final Logger LOG = + LoggerFactory.getLogger(BuiltInZlibDeflater.class); public BuiltInZlibDeflater(int level, boolean nowrap) { super(level, nowrap); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java index 24d98a53397..d7b153b7720 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java @@ -25,9 +25,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.util.NativeCodeLoader; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link Compressor} based on the popular @@ -37,7 +36,8 @@ */ public class ZlibCompressor implements Compressor { - private static final Log LOG = LogFactory.getLog(ZlibCompressor.class); + private static final Logger LOG = + LoggerFactory.getLogger(ZlibCompressor.class); private static final int DEFAULT_DIRECT_BUFFER_SIZE = 64*1024; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index 4112d272398..a2bad42f3f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -18,8 +18,6 @@ package org.apache.hadoop.io.compress.zlib; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.Compressor; import org.apache.hadoop.io.compress.Decompressor; @@ -28,6 +26,8 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A collection of factories to create the right @@ -35,8 +35,7 @@ * */ public class ZlibFactory { - private static final Log LOG = - LogFactory.getLog(ZlibFactory.class); + private static final Logger LOG = 
LoggerFactory.getLogger(ZlibFactory.class); private static boolean nativeZlibLoaded = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java index ce932665742..43d829937e7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/BCFile.java @@ -30,8 +30,6 @@ import java.util.Map; import java.util.TreeMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -43,6 +41,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.ScalarLong; import org.apache.hadoop.io.file.tfile.Compression.Algorithm; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Block Compressed file, the underlying physical storage layer for TFile. @@ -54,7 +54,7 @@ final class BCFile { // the current version of BCFile impl, increment them (major or minor) made // enough changes static final Version API_VERSION = new Version((short) 1, (short) 0); - static final Log LOG = LogFactory.getLog(BCFile.class); + static final Logger LOG = LoggerFactory.getLogger(BCFile.class); /** * Prevent the instantiation of BCFile objects. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java index f7ec7ac8353..2298dc001ec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Compression.java @@ -24,8 +24,6 @@ import java.io.OutputStream; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.compress.CodecPool; import org.apache.hadoop.io.compress.CompressionCodec; @@ -35,6 +33,8 @@ import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeys.IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY; @@ -44,7 +44,7 @@ * Compression related stuff. */ final class Compression { - static final Log LOG = LogFactory.getLog(Compression.class); + static final Logger LOG = LoggerFactory.getLogger(Compression.class); /** * Prevent the instantiation of class. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java index 56739c6c7e0..c63baa550b1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java @@ -29,8 +29,6 @@ import java.util.ArrayList; import java.util.Comparator; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -51,6 +49,8 @@ import org.apache.hadoop.io.file.tfile.CompareUtils.MemcmpRawComparator; import org.apache.hadoop.io.file.tfile.Utils.Version; import org.apache.hadoop.io.serializer.JavaSerializationComparator; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A TFile is a container of key-value pairs. 
Both keys and values are type-less @@ -131,7 +131,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class TFile { - static final Log LOG = LogFactory.getLog(TFile.class); + static final Logger LOG = LoggerFactory.getLogger(TFile.class); private static final String CHUNK_BUF_SIZE_ATTR = "tfile.io.chunk.size"; private static final String FS_INPUT_BUF_SIZE_ATTR = @@ -335,7 +335,7 @@ public void close() throws IOException { writerBCF.close(); } } finally { - IOUtils.cleanup(LOG, blkAppender, writerBCF); + IOUtils.cleanupWithLogger(LOG, blkAppender, writerBCF); blkAppender = null; writerBCF = null; state = State.CLOSED; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java index 84b92eceff0..3ef6b278923 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java @@ -25,8 +25,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; @@ -36,12 +34,14 @@ import org.apache.hadoop.io.file.tfile.BCFile.MetaIndexEntry; import org.apache.hadoop.io.file.tfile.TFile.TFileIndexEntry; import org.apache.hadoop.io.file.tfile.Utils.Version; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Dumping the information of a TFile. */ class TFileDumper { - static final Log LOG = LogFactory.getLog(TFileDumper.class); + static final Logger LOG = LoggerFactory.getLogger(TFileDumper.class); private TFileDumper() { // namespace object not constructable. 
@@ -290,7 +290,7 @@ static public void dumpInfo(String file, PrintStream out, Configuration conf) } } } finally { - IOUtils.cleanup(LOG, reader, fsdis); + IOUtils.cleanupWithLogger(LOG, reader, fsdis); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java index 7e9283e715f..d95efb6a9aa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java @@ -40,9 +40,9 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.PerformanceAdvisory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import sun.misc.Unsafe; import com.google.common.annotations.VisibleForTesting; @@ -98,7 +98,7 @@ public static class POSIX { write. 
*/ public static int SYNC_FILE_RANGE_WAIT_AFTER = 4; - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); // Set to true via JNI if possible public static boolean fadvisePossible = false; @@ -634,7 +634,7 @@ public static boolean access(String path, AccessRight desiredAccess) } } - private static final Log LOG = LogFactory.getLog(NativeIO.class); + private static final Logger LOG = LoggerFactory.getLogger(NativeIO.class); private static boolean nativeLoaded = false; @@ -940,10 +940,10 @@ public static void copyFileUnbuffered(File src, File dst) throws IOException { position += transferred; } } finally { - IOUtils.cleanup(LOG, output); - IOUtils.cleanup(LOG, fos); - IOUtils.cleanup(LOG, input); - IOUtils.cleanup(LOG, fis); + IOUtils.cleanupWithLogger(LOG, output); + IOUtils.cleanupWithLogger(LOG, fos); + IOUtils.cleanupWithLogger(LOG, input); + IOUtils.cleanupWithLogger(LOG, fis); } } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java index 306244a3579..412634462a3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.java @@ -22,10 +22,10 @@ import java.io.FileDescriptor; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A factory for creating shared file descriptors inside a given directory. 
@@ -45,7 +45,8 @@ @InterfaceAudience.Private @InterfaceStability.Unstable public class SharedFileDescriptorFactory { - public static final Log LOG = LogFactory.getLog(SharedFileDescriptorFactory.class); + public static final Logger LOG = + LoggerFactory.getLogger(SharedFileDescriptorFactory.class); private final String prefix; private final String path; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java index d6f3e04f0e5..fa0cb6e6f03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java @@ -32,8 +32,6 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.StandbyException; @@ -41,6 +39,8 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -49,7 +49,7 @@ */ public class RetryPolicies { - public static final Log LOG = LogFactory.getLog(RetryPolicies.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryPolicies.class); /** *

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java index 15a9b54432d..1f5acfea1ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java @@ -19,17 +19,17 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.retry.RetryPolicies.MultipleLinearRandomRetry; import org.apache.hadoop.ipc.RemoteException; import com.google.protobuf.ServiceException; import org.apache.hadoop.ipc.RetriableException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class RetryUtils { - public static final Log LOG = LogFactory.getLog(RetryUtils.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryUtils.class); /** * Return the default retry policy set in conf. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java index aa3c86ae4ec..a9787a0d813 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java @@ -21,8 +21,6 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -31,6 +29,8 @@ import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization; import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -41,8 +41,8 @@ @InterfaceStability.Evolving public class SerializationFactory extends Configured { - private static final Log LOG = - LogFactory.getLog(SerializationFactory.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(SerializationFactory.class.getName()); private List> serializations = new ArrayList>(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java index 2764788579a..d1bd1807b03 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java @@ -28,20 +28,21 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcStatusProto; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Abstracts queue operations for different blocking queues. */ public class CallQueueManager extends AbstractQueue implements BlockingQueue { - public static final Log LOG = LogFactory.getLog(CallQueueManager.class); + public static final Logger LOG = + LoggerFactory.getLogger(CallQueueManager.class); // Number of checkpoints for empty queue. private static final int CHECKPOINT_NUM = 20; // Interval to check empty queue. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java index 4164c7d0dbd..c225d996ac8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java @@ -21,8 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability; @@ -57,6 +55,8 @@ import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.Span; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import javax.security.sasl.Sasl; @@ -84,7 +84,7 @@ @InterfaceStability.Evolving public class Client implements AutoCloseable { - public static final Log LOG = LogFactory.getLog(Client.class); + public static final Logger LOG = LoggerFactory.getLogger(Client.class); /** A counter for generating call IDs. 
*/ private static final AtomicInteger callIdCounter = new AtomicInteger(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java index 8bcaf059367..20161b8e28f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/FairCallQueue.java @@ -33,11 +33,11 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang.NotImplementedException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException; import org.apache.hadoop.metrics2.util.MBeans; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A queue with multiple levels for each priority. 
@@ -50,7 +50,7 @@ public class FairCallQueue extends AbstractQueue public static final String IPC_CALLQUEUE_PRIORITY_LEVELS_KEY = "faircallqueue.priority-levels"; - public static final Log LOG = LogFactory.getLog(FairCallQueue.class); + public static final Logger LOG = LoggerFactory.getLogger(FairCallQueue.class); /* The queues */ private final ArrayList> queues; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index 3c0aaba8cd1..190c550e6d7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -21,8 +21,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.*; import com.google.protobuf.Descriptors.MethodDescriptor; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -39,6 +37,8 @@ import org.apache.hadoop.util.concurrent.AsyncGet; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.SocketFactory; import java.io.IOException; @@ -55,7 +55,8 @@ */ @InterfaceStability.Evolving public class ProtobufRpcEngine implements RpcEngine { - public static final Log LOG = LogFactory.getLog(ProtobufRpcEngine.class); + public static final Logger LOG = + LoggerFactory.getLogger(ProtobufRpcEngine.class); private static final ThreadLocal> ASYNC_RETURN_MESSAGE = new ThreadLocal<>(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index 3f68d6334c3..6d96eabfec0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -37,8 +37,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.*; - import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.*; @@ -59,6 +57,8 @@ import org.apache.hadoop.util.Time; import com.google.protobuf.BlockingService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** A simple RPC mechanism. * @@ -109,7 +109,7 @@ public Writable call(Server server, String protocol, Writable rpcRequest, long receiveTime) throws Exception ; } - static final Log LOG = LogFactory.getLog(RPC.class); + static final Logger LOG = LoggerFactory.getLogger(RPC.class); /** * Get all superInterfaces that extend VersionedProtocol diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java index ee84a04388b..e67e8d9cbeb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java @@ -24,9 +24,9 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Used to registry custom methods to refresh at runtime. 
@@ -34,7 +34,8 @@ */ @InterfaceStability.Unstable public class RefreshRegistry { - public static final Log LOG = LogFactory.getLog(RefreshRegistry.class); + public static final Logger LOG = + LoggerFactory.getLogger(RefreshRegistry.class); // Used to hold singleton instance private static class RegistryHolder { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java index 7b85286b557..6f6ceb5a6ca 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java @@ -22,8 +22,6 @@ import java.util.UUID; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.ipc.metrics.RetryCacheMetrics; import org.apache.hadoop.util.LightWeightCache; @@ -32,6 +30,8 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maintains a cache of non-idempotent requests that have been successfully @@ -44,7 +44,7 @@ */ @InterfaceAudience.Private public class RetryCache { - public static final Log LOG = LogFactory.getLog(RetryCache.class); + public static final Logger LOG = LoggerFactory.getLogger(RetryCache.class); private final RetryCacheMetrics retryCacheMetrics; private static final int MAX_CAPACITY = 16; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index 85b8bfd6c23..a2237bf530d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -69,8 +69,6 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceAudience.Public; @@ -124,6 +122,8 @@ import com.google.protobuf.CodedOutputStream; import com.google.protobuf.Message; import org.codehaus.jackson.map.ObjectMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An abstract IPC service. IPC calls take a single {@link Writable} as a * parameter, and return a {@link Writable} as their value. A service runs on @@ -292,9 +292,9 @@ public static RpcInvoker getRpcInvoker(RPC.RpcKind rpcKind) { } - public static final Log LOG = LogFactory.getLog(Server.class); - public static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+Server.class.getName()); + public static final Logger LOG = LoggerFactory.getLogger(Server.class); + public static final Logger AUDITLOG = + LoggerFactory.getLogger("SecurityLogger."+Server.class.getName()); private static final String AUTH_FAILED_FOR = "Auth failed for "; private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for "; @@ -1112,7 +1112,7 @@ private synchronized void doRunLoop() { } catch (IOException ex) { LOG.error("Error in Reader", ex); } catch (Throwable re) { - LOG.fatal("Bug in read selector!", re); + LOG.error("Bug in read selector!", re); ExitUtil.terminate(1, "Bug in read selector!"); } } @@ -2620,7 +2620,7 @@ public void run() { } } finally { CurCall.set(null); - IOUtils.cleanup(LOG, traceScope); + IOUtils.cleanupWithLogger(LOG, traceScope); } } LOG.debug(Thread.currentThread().getName() + ": exiting"); @@ -2629,7 +2629,7 @@ public void run() { } @VisibleForTesting - void logException(Log logger, 
Throwable e, Call call) { + void logException(Logger logger, Throwable e, Call call) { if (exceptionsHandler.isSuppressedLog(e.getClass())) { return; // Log nothing. } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java index cfda94734cf..d308725c053 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WeightedRoundRobinMultiplexer.java @@ -20,9 +20,9 @@ import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Determines which queue to start reading from, occasionally drawing from @@ -43,8 +43,8 @@ public class WeightedRoundRobinMultiplexer implements RpcMultiplexer { public static final String IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY = "faircallqueue.multiplexer.weights"; - public static final Log LOG = - LogFactory.getLog(WeightedRoundRobinMultiplexer.class); + public static final Logger LOG = + LoggerFactory.getLogger(WeightedRoundRobinMultiplexer.class); private final int numQueues; // The number of queues under our provisioning diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java index df45d62bc11..d122b0d992f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -28,8 +28,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.*; - import 
org.apache.hadoop.io.*; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; @@ -43,11 +41,13 @@ import org.apache.hadoop.conf.*; import org.apache.htrace.core.TraceScope; import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** An RpcEngine implementation for Writable data. */ @InterfaceStability.Evolving public class WritableRpcEngine implements RpcEngine { - private static final Log LOG = LogFactory.getLog(RPC.class); + private static final Logger LOG = LoggerFactory.getLogger(RPC.class); //writableRpcVersion should be updated if there is a change //in format of the rpc messages. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java index a853d642e2f..fc09e0afeb7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RetryCacheMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.ipc.RetryCache; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; @@ -26,6 +24,8 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableCounterLong; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RetryCache-related statistics @@ -35,7 +35,7 @@ @Metrics(about="Aggregate RetryCache metrics", context="rpc") public class RetryCacheMetrics { - static final Log LOG = LogFactory.getLog(RetryCacheMetrics.class); + static final Logger LOG = 
LoggerFactory.getLogger(RetryCacheMetrics.class); final MetricsRegistry registry; final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java index 8b7e995e479..6ed57ec6d97 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcDetailedMetrics.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining RPC method related statistics @@ -37,7 +37,7 @@ public class RpcDetailedMetrics { @Metric MutableRatesWithAggregation rates; @Metric MutableRatesWithAggregation deferredRpcRates; - static final Log LOG = LogFactory.getLog(RpcDetailedMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcDetailedMetrics.class); final MetricsRegistry registry; final String name; @@ -45,7 +45,7 @@ public class RpcDetailedMetrics { name = "RpcDetailedActivityForPort"+ port; registry = new MetricsRegistry("rpcdetailed") .tag("port", "RPC port", String.valueOf(port)); - LOG.debug(registry.info()); + LOG.debug(registry.info().toString()); } public String name() { return name; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java index 8ce13793390..d53d7d3fb58 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/RpcMetrics.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.ipc.metrics; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.classification.InterfaceAudience; @@ -31,6 +29,8 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong; import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.lib.MutableRate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is for maintaining the various RPC statistics @@ -40,7 +40,7 @@ @Metrics(about="Aggregate RPC metrics", context="rpc") public class RpcMetrics { - static final Log LOG = LogFactory.getLog(RpcMetrics.class); + static final Logger LOG = LoggerFactory.getLogger(RpcMetrics.class); final Server server; final MetricsRegistry registry; final String name; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java index 6546c05c002..c8b67bd61a9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java @@ -17,11 +17,11 @@ package org.apache.hadoop.jmx; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.http.HttpServer2; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerator; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import javax.management.AttributeNotFoundException; import javax.management.InstanceNotFoundException; @@ -116,7 +116,8 @@ * */ public class JMXJsonServlet extends HttpServlet { - private static final Log LOG = LogFactory.getLog(JMXJsonServlet.class); + private static final Logger LOG = + LoggerFactory.getLogger(JMXJsonServlet.class); static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods"; static final String ACCESS_CONTROL_ALLOW_ORIGIN = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java index 14a3e331173..3759df7d715 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsUtil.java @@ -20,10 +20,10 @@ import java.net.InetAddress; import java.net.UnknownHostException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class to simplify creation and reporting of hadoop metrics. 
@@ -39,8 +39,7 @@ @InterfaceStability.Evolving public class MetricsUtil { - public static final Log LOG = - LogFactory.getLog(MetricsUtil.class); + public static final Logger LOG = LoggerFactory.getLogger(MetricsUtil.class); /** * Don't allow creation of a new instance of Metrics diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java index 67414c78777..c0a278f88a0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext.java @@ -25,8 +25,6 @@ import java.util.Map; import org.apache.commons.io.Charsets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -34,6 +32,8 @@ import org.apache.hadoop.metrics.spi.AbstractMetricsContext; import org.apache.hadoop.metrics.spi.OutputRecord; import org.apache.hadoop.metrics.spi.Util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Context for sending metrics to Ganglia. 
@@ -63,7 +63,7 @@ public class GangliaContext extends AbstractMetricsContext { private static final int BUFFER_SIZE = 1500; // as per libgmond.c private static final int DEFAULT_MULTICAST_TTL = 1; - private final Log LOG = LogFactory.getLog(this.getClass()); + private final Logger LOG = LoggerFactory.getLogger(this.getClass()); private static final Map typeTable = new HashMap(5); @@ -126,7 +126,7 @@ public void init(String contextName, ContextFactory factory) { datagramSocket = new DatagramSocket(); } } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString()); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java index 0cfd31d4f8a..6e803a31434 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java @@ -23,11 +23,11 @@ import java.net.SocketAddress; import java.net.UnknownHostException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.metrics.ContextFactory; import org.apache.hadoop.net.DNS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Context for sending metrics to Ganglia version 3.1.x. 
@@ -42,8 +42,8 @@ public class GangliaContext31 extends GangliaContext { String hostName = "UNKNOWN.example.com"; - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.util.GangliaContext31"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.util.GangliaContext31"); public void init(String contextName, ContextFactory factory) { super.init(contextName, factory); @@ -62,7 +62,7 @@ public void init(String contextName, ContextFactory factory) { conf.get("dfs.datanode.dns.interface","default"), conf.get("dfs.datanode.dns.nameserver","default")); } catch (UnknownHostException uhe) { - LOG.error(uhe); + LOG.error(uhe.toString()); hostName = "UNKNOWN.example.com"; } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java index ed9d3c93ad3..1b1b1ea48af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/jvm/JvmMetrics.java @@ -29,12 +29,12 @@ import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static java.lang.Thread.State.*; import java.lang.management.GarbageCollectorMXBean; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; /** * Singleton class which reports Java Virtual Machine metrics to the metrics API. 
@@ -50,7 +50,7 @@ public class JvmMetrics implements Updater { private static final float M = 1024*1024; private static JvmMetrics theInstance = null; - private static Log log = LogFactory.getLog(JvmMetrics.class); + private static Logger log = LoggerFactory.getLogger(JvmMetrics.class); private MetricsRecord metrics; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java index f9d3442365d..b04952ef836 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/CompositeContext.java @@ -23,9 +23,6 @@ import java.lang.reflect.Proxy; import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics.ContextFactory; @@ -33,6 +30,8 @@ import org.apache.hadoop.metrics.MetricsRecord; import org.apache.hadoop.metrics.MetricsUtil; import org.apache.hadoop.metrics.Updater; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @deprecated Use org.apache.hadoop.metrics2 package instead. 
@@ -42,7 +41,8 @@ @InterfaceStability.Evolving public class CompositeContext extends AbstractMetricsContext { - private static final Log LOG = LogFactory.getLog(CompositeContext.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeContext.class); private static final String ARITY_LABEL = "arity"; private static final String SUB_FMT = "%s.sub%d"; private final ArrayList subctxt = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java index 2199346ce32..435ffff8256 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsIntValue.java @@ -19,8 +19,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsIntValue class is for a metric that is not time varied @@ -34,8 +34,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsIntValue extends MetricsBase { - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private int value; private boolean changed; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java index 30a5f615860..9661245c687 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingInt.java @@ -19,8 +19,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsTimeVaryingInt class is for a metric that naturally @@ -37,8 +37,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsTimeVaryingInt extends MetricsBase { - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private int currentValue; private int previousIntervalValue; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java index ad2fdf6739d..666a1a1cc3b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingLong.java @@ -20,8 +20,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsTimeVaryingLong class is for a metric that naturally @@ -38,8 +38,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsTimeVaryingLong extends MetricsBase{ - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); private long 
currentValue; private long previousIntervalValue; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java index 5f5099864dc..fdb10730c46 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java @@ -19,8 +19,8 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics.MetricsRecord; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The MetricsTimeVaryingRate class is for a rate based metric that @@ -36,8 +36,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) public class MetricsTimeVaryingRate extends MetricsBase { - private static final Log LOG = - LogFactory.getLog("org.apache.hadoop.metrics.util"); + private static final Logger LOG = + LoggerFactory.getLogger("org.apache.hadoop.metrics.util"); static class Metrics { int numOperations = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java index a76acac1939..528211913d6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java @@ -106,7 +106,7 @@ MBeanInfo get() { } ++curRecNo; } - MetricsSystemImpl.LOG.debug(attrs); + MetricsSystemImpl.LOG.debug(attrs.toString()); MBeanAttributeInfo[] attrsArray = new MBeanAttributeInfo[attrs.size()]; return new MBeanInfo(name, description, 
attrs.toArray(attrsArray), null, null, null); // no ops/ctors/notifications diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java index 001b731b4c4..0afd6215a3c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java @@ -39,18 +39,18 @@ import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsPlugin; import org.apache.hadoop.metrics2.filter.GlobFilter; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Metrics configuration for MetricsSystemImpl */ class MetricsConfig extends SubsetConfiguration { - static final Log LOG = LogFactory.getLog(MetricsConfig.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsConfig.class); static final String DEFAULT_FILE_NAME = "hadoop-metrics2.properties"; static final String PREFIX_DEFAULT = "*."; @@ -115,7 +115,7 @@ static MetricsConfig loadFirst(String prefix, String... 
fileNames) { LOG.info("loaded properties from "+ fname); LOG.debug(toString(cf)); MetricsConfig mc = new MetricsConfig(cf, prefix); - LOG.debug(mc); + LOG.debug(mc.toString()); return mc; } catch (ConfigurationException e) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java index 62498ea882c..e26092a1f6f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSinkAdapter.java @@ -24,8 +24,6 @@ import static com.google.common.base.Preconditions.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.metrics2.lib.MutableGaugeInt; import org.apache.hadoop.metrics2.lib.MetricsRegistry; @@ -36,13 +34,16 @@ import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An adapter class for metrics sink and associated filters */ class MetricsSinkAdapter implements SinkQueue.Consumer { - private final Log LOG = LogFactory.getLog(MetricsSinkAdapter.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSinkAdapter.class); private final String name, description, context; private final MetricsSink sink; private final MetricsFilter sourceFilter, recordFilter, metricFilter; @@ -210,7 +211,7 @@ void stop() { stopping = true; sinkThread.interrupt(); if (sink instanceof Closeable) { - IOUtils.cleanup(LOG, (Closeable)sink); + IOUtils.cleanupWithLogger(LOG, (Closeable)sink); } try { sinkThread.join(); diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java index 04cd88cef4c..a22018a0ab4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSourceAdapter.java @@ -33,8 +33,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsFilter; @@ -43,6 +41,8 @@ import static org.apache.hadoop.metrics2.impl.MetricsConfig.*; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.metrics2.util.Contracts.*; @@ -51,7 +51,8 @@ */ class MetricsSourceAdapter implements DynamicMBean { - private static final Log LOG = LogFactory.getLog(MetricsSourceAdapter.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSourceAdapter.class); private final String prefix, name; private final MetricsSource source; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index e97e9483d4a..1166ff98a4a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -36,8 +36,6 @@ import static com.google.common.base.Preconditions.*; import 
org.apache.commons.configuration.PropertiesConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.math3.util.ArithmeticUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsInfo; @@ -62,6 +60,8 @@ import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A base class for metrics system singletons @@ -70,7 +70,7 @@ @Metrics(context="metricssystem") public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { - static final Log LOG = LogFactory.getLog(MetricsSystemImpl.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsSystemImpl.class); static final String MS_NAME = "MetricsSystem"; static final String MS_STATS_NAME = MS_NAME +",sub=Stats"; static final String MS_STATS_DESC = "Metrics system metrics"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java index 9ab9243c0b2..8f0442a9be7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MethodMetric.java @@ -22,20 +22,21 @@ import static com.google.common.base.Preconditions.*; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.annotation.Metric; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.util.Contracts.*; 
/** * Metric generated from a method, mostly used by annotation */ class MethodMetric extends MutableMetric { - private static final Log LOG = LogFactory.getLog(MethodMetric.class); + private static final Logger LOG = LoggerFactory.getLogger(MethodMetric.class); private final Object obj; private final Method method; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java index b44a471b364..7730cfd13e0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MetricsSourceBuilder.java @@ -24,8 +24,6 @@ import static com.google.common.base.Preconditions.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; @@ -34,13 +32,16 @@ import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to build metrics source object from annotations */ @InterfaceAudience.Private public class MetricsSourceBuilder { - private static final Log LOG = LogFactory.getLog(MetricsSourceBuilder.class); + private static final Logger LOG = + LoggerFactory.getLogger(MetricsSourceBuilder.class); private final Object source; private final MutableMetricsFactory factory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java index 
9e7810a9c34..32099907fba 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java @@ -22,19 +22,20 @@ import java.lang.reflect.Method; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Private @InterfaceStability.Evolving public class MutableMetricsFactory { - private static final Log LOG = LogFactory.getLog(MutableMetricsFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(MutableMetricsFactory.class); MutableMetric newForField(Field field, Metric annotation, MetricsRegistry registry) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java index 1074e87255e..994eb13e08d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRates.java @@ -24,12 +24,11 @@ import static com.google.common.base.Preconditions.*; import com.google.common.collect.Sets; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import 
org.apache.hadoop.metrics2.MetricsRecordBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to manage a group of mutable rate metrics @@ -43,7 +42,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MutableRates extends MutableMetric { - static final Log LOG = LogFactory.getLog(MutableRates.class); + static final Logger LOG = LoggerFactory.getLogger(MutableRates.class); private final MetricsRegistry registry; private final Set> protocolCache = Sets.newHashSet(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java index 9827ca77e82..26a15063bb4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRatesWithAggregation.java @@ -27,12 +27,12 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedDeque; import java.util.concurrent.ConcurrentMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.util.SampleStat; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -48,7 +48,8 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MutableRatesWithAggregation extends MutableMetric { - static final Log LOG = LogFactory.getLog(MutableRatesWithAggregation.class); + static final Logger LOG = + LoggerFactory.getLogger(MutableRatesWithAggregation.class); private final Map globalMetrics = new ConcurrentHashMap<>(); private final Set> protocolCache = 
Sets.newHashSet(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java index a61fa5b97bb..6efd193fc9c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java @@ -19,8 +19,6 @@ package org.apache.hadoop.metrics2.sink; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.AbstractMetric; @@ -28,6 +26,8 @@ import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.metrics2.MetricsTag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.Closeable; import java.io.IOException; @@ -42,7 +42,8 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class GraphiteSink implements MetricsSink, Closeable { - private static final Log LOG = LogFactory.getLog(GraphiteSink.class); + private static final Logger LOG = + LoggerFactory.getLogger(GraphiteSink.class); private static final String SERVER_HOST_KEY = "server_host"; private static final String SERVER_PORT_KEY = "server_port"; private static final String METRICS_PREFIX = "metrics_prefix"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java index 0afc9f22901..2eb813a0ccb 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java @@ -26,11 +26,11 @@ import java.util.Map; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.metrics2.util.Servers; import org.apache.hadoop.net.DNS; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This the base class for Ganglia sink classes using metrics2. Lot of the code @@ -41,7 +41,7 @@ */ public abstract class AbstractGangliaSink implements MetricsSink { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); /* * Output of "gmetric --help" showing allowable values @@ -126,7 +126,7 @@ public void init(SubsetConfiguration conf) { conf.getString("dfs.datanode.dns.interface", "default"), conf.getString("dfs.datanode.dns.nameserver", "default")); } catch (UnknownHostException uhe) { - LOG.error(uhe); + LOG.error(uhe.toString()); hostName = "UNKNOWN.example.com"; } } @@ -154,7 +154,7 @@ public void init(SubsetConfiguration conf) { datagramSocket = new DatagramSocket(); } } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString()); } // see if sparseMetrics is supported. 
Default is false diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java index 37f91c9da98..c110252cd22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink30.java @@ -27,8 +27,6 @@ import java.util.Set; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsException; @@ -37,6 +35,8 @@ import org.apache.hadoop.metrics2.impl.MsInfo; import org.apache.hadoop.metrics2.util.MetricsCache; import org.apache.hadoop.metrics2.util.MetricsCache.Record; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This code supports Ganglia 3.0 @@ -44,7 +44,7 @@ */ public class GangliaSink30 extends AbstractGangliaSink { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); private static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = "tagsForPrefix."; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java index c8315e8e148..908a79dd352 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/GangliaSink31.java @@ -18,10 +18,10 @@ package org.apache.hadoop.metrics2.sink.ganglia; 
-import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import java.io.IOException; /** * This code supports Ganglia 3.1 @@ -29,7 +29,7 @@ */ public class GangliaSink31 extends GangliaSink30 { - public final Log LOG = LogFactory.getLog(this.getClass()); + public final Logger LOG = LoggerFactory.getLogger(this.getClass()); /** * The method sends metrics to Ganglia servers. The method has been taken from diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java index 8b58ec008cc..06734c8257f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MBeans.java @@ -25,11 +25,11 @@ import javax.management.MBeanServer; import javax.management.ObjectName; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This util class provides a method to register an MBean using @@ -39,7 +39,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class MBeans { - private static final Log LOG = LogFactory.getLog(MBeans.class); + private static final Logger LOG = LoggerFactory.getLogger(MBeans.class); private static final String DOMAIN_PREFIX = "Hadoop:"; private static final String SERVICE_PREFIX = "service="; private static final String NAME_PREFIX = "name="; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java index efcb286fae2..e659846129c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/util/MetricsCache.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.metrics2.AbstractMetric; @@ -33,6 +31,8 @@ import com.google.common.base.Objects; import com.google.common.collect.Maps; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A metrics cache for sinks that don't support sparse updates. @@ -40,7 +40,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class MetricsCache { - static final Log LOG = LogFactory.getLog(MetricsCache.class); + static final Logger LOG = LoggerFactory.getLogger(MetricsCache.class); static final int MAX_RECS_PER_NAME_DEFAULT = 1000; private final Map map = Maps.newHashMap(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java index a6dc8e3d376..81041c110ad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java @@ -20,10 +20,10 @@ import com.google.common.net.InetAddresses; import com.sun.istack.Nullable; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
java.net.InetAddress; import java.net.NetworkInterface; @@ -52,7 +52,7 @@ @InterfaceStability.Unstable public class DNS { - private static final Log LOG = LogFactory.getLog(DNS.class); + private static final Logger LOG = LoggerFactory.getLogger(DNS.class); /** * The cached hostname -initially null. diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java index 40501073fdc..85773364c43 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java @@ -44,8 +44,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import org.apache.commons.net.util.SubnetUtils.SubnetInfo; import org.apache.hadoop.classification.InterfaceAudience; @@ -58,11 +56,13 @@ import org.apache.hadoop.util.ReflectionUtils; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Unstable public class NetUtils { - private static final Log LOG = LogFactory.getLog(NetUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(NetUtils.class); private static Map hostToResolved = new HashMap(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java index 3dcb61090da..02b44a54fec 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java @@ -21,13 +21,13 @@ import java.util.*; import java.io.*; 
-import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class implements the {@link DNSToSwitchMapping} interface using a @@ -145,8 +145,8 @@ protected static class RawScriptBasedMapping extends AbstractDNSToSwitchMapping { private String scriptName; private int maxArgs; //max hostnames per call of the script - private static final Log LOG = - LogFactory.getLog(ScriptBasedMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(ScriptBasedMapping.class); /** * Set the configuration and extract the configuration parameters of interest diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java index b50f7e936ba..f489581843f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocketIOWithTimeout.java @@ -31,9 +31,9 @@ import java.util.Iterator; import java.util.LinkedList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This supports input and output streams for a socket channels. @@ -42,7 +42,7 @@ abstract class SocketIOWithTimeout { // This is intentionally package private. 
- static final Log LOG = LogFactory.getLog(SocketIOWithTimeout.class); + static final Logger LOG = LoggerFactory.getLogger(SocketIOWithTimeout.class); private SelectableChannel channel; private long timeout; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 362cf07b43c..ead9a7430b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -29,12 +29,12 @@ import java.util.Map; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -56,7 +56,7 @@ @InterfaceStability.Evolving public class TableMapping extends CachedDNSToSwitchMapping { - private static final Log LOG = LogFactory.getLog(TableMapping.class); + private static final Logger LOG = LoggerFactory.getLogger(TableMapping.class); public TableMapping() { super(new RawTableMapping()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java index 8379fd1a4bb..ac118c05172 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocket.java @@ -29,12 +29,12 @@ import java.nio.ByteBuffer; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.CloseableReferenceCount; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The implementation of UNIX domain sockets in Java. 
@@ -60,7 +60,7 @@ public class DomainSocket implements Closeable { } } - static Log LOG = LogFactory.getLog(DomainSocket.class); + static final Logger LOG = LoggerFactory.getLogger(DomainSocket.class); /** * True only if we should validate the paths used in @@ -459,13 +459,13 @@ public int recvFileInputStreams(FileInputStream[] streams, byte buf[], try { closeFileDescriptor0(descriptors[i]); } catch (Throwable t) { - LOG.warn(t); + LOG.warn(t.toString()); } } else if (streams[i] != null) { try { streams[i].close(); } catch (Throwable t) { - LOG.warn(t); + LOG.warn(t.toString()); } finally { streams[i] = null; } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java index e1bcf7e20c3..c7af97f60af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/unix/DomainSocketWatcher.java @@ -33,13 +33,13 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The DomainSocketWatcher watches a set of domain sockets to see when they @@ -68,7 +68,7 @@ public final class DomainSocketWatcher implements Closeable { } } - static Log LOG = LogFactory.getLog(DomainSocketWatcher.class); + static final Logger LOG = LoggerFactory.getLogger(DomainSocketWatcher.class); /** * The reason why DomainSocketWatcher is not available, or null if it is @@ -306,7 +306,7 @@ public void add(DomainSocket sock, 
Handler handler) { try { if (closed) { handler.handle(sock); - IOUtils.cleanup(LOG, sock); + IOUtils.cleanupWithLogger(LOG, sock); return; } Entry entry = new Entry(sock, handler); @@ -411,7 +411,7 @@ private boolean sendCallback(String caller, TreeMap entries, this + ": file descriptor " + sock.fd + " was closed while " + "still in the poll(2) loop."); } - IOUtils.cleanup(LOG, sock); + IOUtils.cleanupWithLogger(LOG, sock); fdSet.remove(fd); return true; } else { @@ -524,7 +524,7 @@ public void run() { Entry entry = iter.next(); entry.getDomainSocket().refCount.unreference(); entry.getHandler().handle(entry.getDomainSocket()); - IOUtils.cleanup(LOG, entry.getDomainSocket()); + IOUtils.cleanupWithLogger(LOG, entry.getDomainSocket()); iter.remove(); } // Items in toRemove might not be really removed, handle it here diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java index ffa7e2bdbbd..b8cfdf71b21 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/CompositeGroupsMapping.java @@ -25,13 +25,13 @@ import java.util.Set; import java.util.TreeSet; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link GroupMappingServiceProvider} which @@ -48,7 +48,8 @@ public class CompositeGroupsMapping public static final String MAPPING_PROVIDERS_COMBINED_CONFIG_KEY = MAPPING_PROVIDERS_CONFIG_KEY + 
".combined"; public static final String MAPPING_PROVIDER_CONFIG_PREFIX = GROUP_MAPPING_CONFIG_PREFIX + ".provider"; - private static final Log LOG = LogFactory.getLog(CompositeGroupsMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeGroupsMapping.class); private List providersList = new ArrayList(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index 465f4a8ad01..4fdf11f148e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -34,8 +34,6 @@ import java.util.Map; import java.util.Map.Entry; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -48,6 +46,8 @@ import org.apache.hadoop.io.WritableUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A class that provides the facilities of reading and writing @@ -56,7 +56,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class Credentials implements Writable { - private static final Log LOG = LogFactory.getLog(Credentials.class); + private static final Logger LOG = LoggerFactory.getLogger(Credentials.class); private Map secretKeysMap = new HashMap(); private Map> tokenMap = @@ -184,7 +184,7 @@ public static Credentials readTokenStorageFile(Path filename, Configuration conf } catch(IOException ioe) { throw new IOException("Exception reading " + filename, ioe); } finally { - IOUtils.cleanup(LOG, in); + IOUtils.cleanupWithLogger(LOG, in); } } @@ 
-207,7 +207,7 @@ public static Credentials readTokenStorageFile(File filename, Configuration conf } catch(IOException ioe) { throw new IOException("Exception reading " + filename, ioe); } finally { - IOUtils.cleanup(LOG, in); + IOUtils.cleanupWithLogger(LOG, in); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java index a8fa7241b77..2eb7d6d3233 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java @@ -59,9 +59,8 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Timer; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A user-to-groups mapping service. 
@@ -74,7 +73,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class Groups { - private static final Log LOG = LogFactory.getLog(Groups.class); + private static final Logger LOG = LoggerFactory.getLogger(Groups.class); private final GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java index f9c18166648..47b5a58e003 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/HttpCrossOriginFilterInitializer.java @@ -21,20 +21,20 @@ import java.util.HashMap; import java.util.Map; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.http.FilterContainer; import org.apache.hadoop.http.FilterInitializer; import org.apache.hadoop.security.http.CrossOriginFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class HttpCrossOriginFilterInitializer extends FilterInitializer { public static final String PREFIX = "hadoop.http.cross-origin."; public static final String ENABLED_SUFFIX = "enabled"; - private static final Log LOG = - LogFactory.getLog(HttpCrossOriginFilterInitializer.class); + private static final Logger LOG = + LoggerFactory.getLogger(HttpCrossOriginFilterInitializer.class); @Override public void initFilter(FilterContainer container, Configuration conf) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java index d397e44001c..a0f6142a3c5 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMapping.java @@ -25,9 +25,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A JNI-based implementation of {@link GroupMappingServiceProvider} @@ -38,8 +38,8 @@ @InterfaceStability.Evolving public class JniBasedUnixGroupsMapping implements GroupMappingServiceProvider { - private static final Log LOG = - LogFactory.getLog(JniBasedUnixGroupsMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(JniBasedUnixGroupsMapping.class); static { if (!NativeCodeLoader.isNativeCodeLoaded()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java index 40333fcc5df..f1644305d91 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsMappingWithFallback.java @@ -21,16 +21,16 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.PerformanceAdvisory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class JniBasedUnixGroupsMappingWithFallback implements GroupMappingServiceProvider { - private static final Log LOG = LogFactory - 
.getLog(JniBasedUnixGroupsMappingWithFallback.class); + private static final Logger LOG = LoggerFactory + .getLogger(JniBasedUnixGroupsMappingWithFallback.class); private GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java index ff4ab989e7c..86dabfbe914 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.java @@ -26,11 +26,11 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.security.NetgroupCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A JNI-based implementation of {@link GroupMappingServiceProvider} @@ -42,8 +42,8 @@ public class JniBasedUnixGroupsNetgroupMapping extends JniBasedUnixGroupsMapping { - private static final Log LOG = LogFactory.getLog( - JniBasedUnixGroupsNetgroupMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(JniBasedUnixGroupsNetgroupMapping.class); native String[] getUsersForNetgroupJNI(String group); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java index 7d77c1097b2..fcc47cb796f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMappingWithFallback.java @@ -21,15 +21,15 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class JniBasedUnixGroupsNetgroupMappingWithFallback implements GroupMappingServiceProvider { - private static final Log LOG = LogFactory - .getLog(JniBasedUnixGroupsNetgroupMappingWithFallback.class); + private static final Logger LOG = LoggerFactory + .getLogger(JniBasedUnixGroupsNetgroupMappingWithFallback.class); private GroupMappingServiceProvider impl; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java index 1a184e842b3..babfa3809b6 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java @@ -41,12 +41,12 @@ import javax.naming.ldap.LdapName; import javax.naming.ldap.Rdn; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An implementation of {@link GroupMappingServiceProvider} which @@ -211,7 +211,8 @@ public class LdapGroupsMapping LDAP_CONFIG_PREFIX + ".read.timeout.ms"; public static final int READ_TIMEOUT_DEFAULT = 60 * 1000; // 60 seconds - private static final Log LOG = LogFactory.getLog(LdapGroupsMapping.class); + private static final 
Logger LOG = + LoggerFactory.getLogger(LdapGroupsMapping.class); static final SearchControls SEARCH_CONTROLS = new SearchControls(); static { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java index 013e56c9e69..8dcf8b95e0b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java @@ -26,14 +26,14 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.io.IOUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.alias.CredentialProviderFactory; import org.apache.hadoop.security.alias.JavaKeyStoreProvider; import org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility methods for both key and credential provider APIs. 
@@ -57,7 +57,8 @@ public final class ProviderUtils { "Please review the documentation regarding provider passwords in\n" + "the keystore passwords section of the Credential Provider API\n"; - private static final Log LOG = LogFactory.getLog(ProviderUtils.class); + private static final Logger LOG = + LoggerFactory.getLogger(ProviderUtils.class); /** * Hidden ctor to ensure that this utility class isn't diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java index a3d66b977c2..a91a90ac7c9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslInputStream.java @@ -30,10 +30,10 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A SaslInputStream is composed of an InputStream and a SaslServer (or @@ -45,7 +45,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class SaslInputStream extends InputStream implements ReadableByteChannel { - public static final Log LOG = LogFactory.getLog(SaslInputStream.class); + public static final Logger LOG = + LoggerFactory.getLogger(SaslInputStream.class); private final DataInputStream inStream; /** Should we wrap the communication channel? 
*/ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java index bef877c5121..5b14ebe61ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java @@ -45,8 +45,6 @@ import javax.security.sasl.SaslException; import javax.security.sasl.SaslClient; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -74,13 +72,16 @@ import com.google.common.annotations.VisibleForTesting; import com.google.protobuf.ByteString; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + /** * A utility class that encapsulates SASL logic for RPC client */ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class SaslRpcClient { - public static final Log LOG = LogFactory.getLog(SaslRpcClient.class); + public static final Logger LOG = LoggerFactory.getLogger(SaslRpcClient.class); private final UserGroupInformation ugi; private final Class protocol; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index 377a0f11270..653bf074429 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -45,8 +45,6 @@ import javax.security.sasl.SaslServerFactory; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -57,6 +55,8 @@ import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.security.token.TokenIdentifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A utility class for dealing with SASL on RPC server @@ -64,7 +64,7 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Evolving public class SaslRpcServer { - public static final Log LOG = LogFactory.getLog(SaslRpcServer.class); + public static final Logger LOG = LoggerFactory.getLogger(SaslRpcServer.class); public static final String SASL_DEFAULT_REALM = "default"; private static SaslServerFactory saslFactory; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index b7d1ec04825..20e87544927 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -36,8 +36,6 @@ import javax.security.auth.kerberos.KerberosPrincipal; import javax.security.auth.kerberos.KerberosTicket; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -51,7 +49,8 @@ import org.apache.hadoop.util.StopWatch; import org.apache.hadoop.util.StringUtils; - +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; //this will need to be replaced someday when there is a suitable replacement import 
sun.net.dns.ResolverConfiguration; import sun.net.util.IPAddressUtil; @@ -64,7 +63,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public final class SecurityUtil { - public static final Log LOG = LogFactory.getLog(SecurityUtil.class); + public static final Logger LOG = LoggerFactory.getLogger(SecurityUtil.class); public static final String HOSTNAME_PATTERN = "_HOST"; public static final String FAILED_TO_GET_UGI_MSG_HEADER = "Failed to obtain user group information:"; @@ -473,7 +472,7 @@ public static T doAsLoginUserOrFatal(PrivilegedAction action) { try { ugi = UserGroupInformation.getLoginUser(); } catch (IOException e) { - LOG.fatal("Exception while getting login user", e); + LOG.error("Exception while getting login user", e); e.printStackTrace(); Runtime.getRuntime().exit(-1); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java index fae556f6ba2..da930b4ff4d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java @@ -29,14 +29,14 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple shell-based implementation of {@link IdMappingServiceProvider} @@ -62,8 +62,8 @@ */ public class ShellBasedIdMapping implements IdMappingServiceProvider { - private static final Log LOG = - LogFactory.getLog(ShellBasedIdMapping.class); + private static 
final Logger LOG = + LoggerFactory.getLogger(ShellBasedIdMapping.class); private final static String OS = System.getProperty("os.name"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java index 4aa4e9f55b5..eff6985471b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsNetgroupMapping.java @@ -23,12 +23,12 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.security.NetgroupCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple shell-based implementation of {@link GroupMappingServiceProvider} @@ -40,8 +40,8 @@ public class ShellBasedUnixGroupsNetgroupMapping extends ShellBasedUnixGroupsMapping { - private static final Log LOG = - LogFactory.getLog(ShellBasedUnixGroupsNetgroupMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShellBasedUnixGroupsNetgroupMapping.class); /** * Get unix groups (parent) and netgroups for given user diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java index 8d4df642995..a64c4de7b66 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java @@ -24,13 +24,13 @@ import javax.security.sasl.Sasl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.SaslPropertiesResolver; import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection; import org.apache.hadoop.util.CombinedIPWhiteList; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** @@ -54,7 +54,8 @@ * */ public class WhitelistBasedResolver extends SaslPropertiesResolver { - public static final Log LOG = LogFactory.getLog(WhitelistBasedResolver.class); + public static final Logger LOG = + LoggerFactory.getLogger(WhitelistBasedResolver.class); private static final String FIXEDWHITELIST_DEFAULT_LOCATION = "/etc/hadoop/fixedwhitelist"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java index 8e4a0a5f811..df783f16edb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java @@ -18,8 +18,6 @@ package org.apache.hadoop.security.alias; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -27,6 +25,8 @@ import org.apache.hadoop.security.ProviderUtils; import com.google.common.base.Charsets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.crypto.spec.SecretKeySpec; import 
java.io.IOException; @@ -60,7 +60,7 @@ */ @InterfaceAudience.Private public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider { - public static final Log LOG = LogFactory.getLog( + public static final Logger LOG = LoggerFactory.getLogger( AbstractJavaKeyStoreProvider.class); public static final String CREDENTIAL_PASSWORD_ENV_VAR = "HADOOP_CREDSTORE_PASSWORD"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java index 9da95dc791a..4c47348fa55 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java @@ -23,8 +23,6 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; @@ -36,6 +34,8 @@ import org.apache.hadoop.util.MachineList; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * An authorization manager which handles service-level authorization @@ -69,8 +69,9 @@ public class ServiceAuthorizationManager { public static final String SERVICE_AUTHORIZATION_CONFIG = "hadoop.security.authorization"; - public static final Log AUDITLOG = - LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName()); + public static final Logger AUDITLOG = + LoggerFactory.getLogger( + "SecurityLogger." 
+ ServiceAuthorizationManager.class.getName()); private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for "; private static final String AUTHZ_FAILED_FOR = "Authorization failed for "; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java index ea7876224bd..58d50cf9721 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/CrossOriginFilter.java @@ -35,14 +35,15 @@ import javax.servlet.http.HttpServletResponse; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CrossOriginFilter implements Filter { - private static final Log LOG = LogFactory.getLog(CrossOriginFilter.class); + private static final Logger LOG = + LoggerFactory.getLogger(CrossOriginFilter.class); // HTTP CORS Request Headers static final String ORIGIN = "Origin"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index 4e5901054d8..405059b03bb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -18,12 +18,12 @@ package org.apache.hadoop.security.ssl; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; @@ -47,8 +47,8 @@ @InterfaceStability.Evolving public class FileBasedKeyStoresFactory implements KeyStoresFactory { - private static final Log LOG = - LogFactory.getLog(FileBasedKeyStoresFactory.class); + private static final Logger LOG = + LoggerFactory.getLogger(FileBasedKeyStoresFactory.class); public static final String SSL_KEYSTORE_LOCATION_TPL_KEY = "ssl.{0}.keystore.location"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java index 597f8d77df6..3be3635b694 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/ReloadingX509TrustManager.java @@ -18,12 +18,12 @@ package org.apache.hadoop.security.ssl; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; @@ -47,7 +47,8 @@ public final class ReloadingX509TrustManager implements X509TrustManager, Runnable { @VisibleForTesting - static final Log LOG = LogFactory.getLog(ReloadingX509TrustManager.class); + static final Logger LOG = + 
LoggerFactory.getLogger(ReloadingX509TrustManager.class); @VisibleForTesting static final String RELOAD_ERROR_MESSAGE = "Could not load truststore (keep using existing one) : "; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java index 7ff2292627f..9bf009d1f52 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SslSelectChannelConnectorSecure.java @@ -23,10 +23,10 @@ import javax.net.ssl.SSLEngine; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.mortbay.jetty.security.SslSelectChannelConnector; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This subclass of the Jetty SslSelectChannelConnector exists solely to @@ -36,8 +36,8 @@ */ @InterfaceAudience.Private public class SslSelectChannelConnectorSecure extends SslSelectChannelConnector { - public static final Log LOG = - LogFactory.getLog(SslSelectChannelConnectorSecure.class); + public static final Logger LOG = + LoggerFactory.getLogger(SslSelectChannelConnectorSecure.class); public SslSelectChannelConnectorSecure() { super(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java index ed699a74dcb..5a15b949e33 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java @@ -22,13 +22,13 @@ import com.google.common.primitives.Bytes; import 
org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.*; import org.apache.hadoop.util.ReflectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.*; import java.util.Arrays; @@ -42,7 +42,7 @@ @InterfaceAudience.Public @InterfaceStability.Evolving public class Token implements Writable { - public static final Log LOG = LogFactory.getLog(Token.class); + public static final Logger LOG = LoggerFactory.getLogger(Token.class); private static Map> tokenKindMap; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java index cf887450324..f06681b9f5e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java @@ -30,8 +30,6 @@ import javax.crypto.SecretKey; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.Text; @@ -43,6 +41,8 @@ import org.apache.hadoop.util.Time; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceAudience.Public @InterfaceStability.Evolving @@ -50,8 +50,8 @@ class AbstractDelegationTokenSecretManager extends SecretManager { - private static final Log LOG = LogFactory - 
.getLog(AbstractDelegationTokenSecretManager.class); + private static final Logger LOG = LoggerFactory + .getLogger(AbstractDelegationTokenSecretManager.class); private String formatTokenId(TokenIdent id) { return "(" + id + ")"; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java index 132768386a1..2a1140f7519 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/AbstractService.java @@ -25,13 +25,13 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is the base implementation class for services. @@ -40,7 +40,8 @@ @Evolving public abstract class AbstractService implements Service { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractService.class); /** * Service name. 
@@ -258,7 +259,7 @@ public final void close() throws IOException { */ protected final void noteFailure(Exception exception) { if (LOG.isDebugEnabled()) { - LOG.debug("noteFailure " + exception, null); + LOG.debug("noteFailure " + exception, (Throwable) null); } if (exception == null) { //make sure failure logic doesn't itself cause problems diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java index 51cb4a336d6..a5e8c895c65 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/CompositeService.java @@ -21,11 +21,11 @@ import java.util.ArrayList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Composition of services. 
@@ -34,7 +34,8 @@ @Evolving public class CompositeService extends AbstractService { - private static final Log LOG = LogFactory.getLog(CompositeService.class); + private static final Logger LOG = + LoggerFactory.getLogger(CompositeService.class); /** * Policy on shutdown: attempt to close everything (purest) or diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java index 700999d625d..c978fecf2de 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/LoggingStateChangeListener.java @@ -18,10 +18,10 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a state change listener that logs events at INFO level @@ -30,15 +30,16 @@ @Evolving public class LoggingStateChangeListener implements ServiceStateChangeListener { - private static final Log LOG = LogFactory.getLog(LoggingStateChangeListener.class); + private static final Logger LOG = + LoggerFactory.getLogger(LoggingStateChangeListener.class); - private final Log log; + private final Logger log; /** * Log events to the given log * @param log destination for events */ - public LoggingStateChangeListener(Log log) { + public LoggingStateChangeListener(Logger log) { //force an NPE if a null log came in log.isDebugEnabled(); this.log = log; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java index a0a77ceb307..e7683a2cb22 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/ServiceOperations.java @@ -22,10 +22,10 @@ import java.util.List; import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class contains a set of methods to work with services, especially @@ -34,7 +34,8 @@ @Public @Evolving public final class ServiceOperations { - private static final Log LOG = LogFactory.getLog(AbstractService.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractService.class); private ServiceOperations() { } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java index 75601adb19f..658e4d326b1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TracerConfigurationManager.java @@ -19,13 +19,13 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair; import org.apache.htrace.core.SpanReceiver; import org.apache.htrace.core.TracerPool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides 
functions for managing the tracer configuration at @@ -33,8 +33,8 @@ */ @InterfaceAudience.Private public class TracerConfigurationManager implements TraceAdminProtocol { - private static final Log LOG = - LogFactory.getLog(TracerConfigurationManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(TracerConfigurationManager.class); private final String confPrefix; private final Configuration conf; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java index 2f46e1fee5b..972bbff4cfd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ApplicationClassLoader.java @@ -29,12 +29,12 @@ import java.util.List; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link URLClassLoader} for application isolation. 
Classes from the @@ -56,8 +56,8 @@ public class ApplicationClassLoader extends URLClassLoader { private static final String SYSTEM_CLASSES_DEFAULT_KEY = "system.classes.default"; - private static final Log LOG = - LogFactory.getLog(ApplicationClassLoader.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(ApplicationClassLoader.class.getName()); static { try (InputStream is = ApplicationClassLoader.class.getClassLoader() @@ -179,7 +179,7 @@ protected synchronized Class loadClass(String name, boolean resolve) } } catch (ClassNotFoundException e) { if (LOG.isDebugEnabled()) { - LOG.debug(e); + LOG.debug(e.toString()); } ex = e; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java index df151666d47..8e48cb955a3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/AsyncDiskService.java @@ -26,10 +26,10 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /* * This class is a container of multiple thread pools, each for a volume, @@ -43,7 +43,8 @@ @InterfaceStability.Unstable public class AsyncDiskService { - public static final Log LOG = LogFactory.getLog(AsyncDiskService.class); + public static final Logger LOG = + LoggerFactory.getLogger(AsyncDiskService.class); // ThreadPool core pool size private static final int CORE_THREADS_PER_VOLUME = 1; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java index d12c4c11d5d..6d42dc0362e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CombinedIPWhiteList.java @@ -17,12 +17,13 @@ */ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class CombinedIPWhiteList implements IPList { - public static final Log LOG = LogFactory.getLog(CombinedIPWhiteList.class); + public static final Logger LOG = + LoggerFactory.getLogger(CombinedIPWhiteList.class); private static final String LOCALHOST_IP = "127.0.0.1"; private final IPList[] networkLists; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java index 6ee1212df35..146f65c6cc0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java @@ -17,6 +17,9 @@ */ package org.apache.hadoop.util; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; @@ -29,9 +32,6 @@ import java.util.HashSet; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - /** * FileBasedIPList loads a list of subnets in CIDR format and ip addresses from * a file. 
@@ -43,7 +43,8 @@ */ public class FileBasedIPList implements IPList { - private static final Log LOG = LogFactory.getLog(FileBasedIPList.class); + private static final Logger LOG = + LoggerFactory.getLogger(FileBasedIPList.class); private final String fileName; private final MachineList addressList; @@ -107,7 +108,7 @@ private static String[] readLines(String fileName) throws IOException { } } } catch (IOException ioe) { - LOG.error(ioe); + LOG.error(ioe.toString()); throw ioe; } return null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java index e4a8d0f5b5d..fbc1418288c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GSet.java @@ -19,9 +19,9 @@ import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A {@link GSet} is set, @@ -35,7 +35,7 @@ */ @InterfaceAudience.Private public interface GSet extends Iterable { - static final Log LOG = LogFactory.getLog(GSet.class); + Logger LOG = LoggerFactory.getLogger(GSet.class); /** * @return The size of this set. 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java index 528865b8a63..7b0a25cf4b2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/GenericOptionsParser.java @@ -34,8 +34,6 @@ import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -45,6 +43,8 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * GenericOptionsParser is a utility to parse command line @@ -113,7 +113,8 @@ @InterfaceStability.Evolving public class GenericOptionsParser { - private static final Log LOG = LogFactory.getLog(GenericOptionsParser.class); + private static final Logger LOG = + LoggerFactory.getLogger(GenericOptionsParser.class); private Configuration conf; private CommandLine commandLine; private final boolean parseSuccessful; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java index b7adc38cb18..dab7d621a90 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java @@ -31,11 +31,11 @@ import 
javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -47,7 +47,8 @@ @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"}) @InterfaceStability.Unstable public class HostsFileReader { - private static final Log LOG = LogFactory.getLog(HostsFileReader.class); + private static final Logger LOG = + LoggerFactory.getLogger(HostsFileReader.class); private final AtomicReference current; @@ -171,7 +172,7 @@ public static void readXmlFileToMapWithFileInputStream(String type, } } } catch (IOException|SAXException|ParserConfigurationException e) { - LOG.fatal("error parsing " + filename, e); + LOG.error("error parsing " + filename, e); throw new RuntimeException(e); } finally { fileInputStream.close(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java index 0512d4aa5d1..1ffb7db3feb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/IntrusiveCollection.java @@ -21,11 +21,11 @@ import java.util.Iterator; import java.util.NoSuchElementException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implements an intrusive 
doubly-linked list. @@ -298,7 +298,8 @@ public boolean addFirst(Element elem) { return true; } - public static final Log LOG = LogFactory.getLog(IntrusiveCollection.class); + public static final Logger LOG = + LoggerFactory.getLogger(IntrusiveCollection.class); @Override public boolean remove(Object o) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java index 80d4468dc55..420ac8bc185 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/JvmPauseMonitor.java @@ -24,8 +24,6 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; @@ -35,6 +33,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class which sets up a simple thread which runs in a loop sleeping @@ -45,7 +45,7 @@ */ @InterfaceAudience.Private public class JvmPauseMonitor extends AbstractService { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( JvmPauseMonitor.class); /** The target sleep time */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java index 2e6c079d0f2..b01330f2432 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/MachineList.java @@ -26,12 +26,12 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import com.google.common.annotations.VisibleForTesting; import com.google.common.net.InetAddresses; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Container class which holds a list of ip/host addresses and @@ -43,7 +43,7 @@ public class MachineList { - public static final Log LOG = LogFactory.getLog(MachineList.class); + public static final Logger LOG = LoggerFactory.getLogger(MachineList.class); public static final String WILDCARD_VALUE = "*"; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java index ff5803c8b41..2578ae719f7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java @@ -18,12 +18,12 @@ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A helper to load the native hadoop code i.e. libhadoop.so. 
@@ -35,8 +35,8 @@ @InterfaceStability.Unstable public class NativeCodeLoader { - private static final Log LOG = - LogFactory.getLog(NativeCodeLoader.class); + private static final Logger LOG = + LoggerFactory.getLogger(NativeCodeLoader.class); private static boolean nativeCodeLoaded = false; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java index fc392c495bb..cf1e46053a8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NodeHealthScriptRunner.java @@ -25,8 +25,6 @@ import java.util.Timer; import java.util.TimerTask; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.service.AbstractService; @@ -34,6 +32,8 @@ import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -43,7 +43,8 @@ */ public class NodeHealthScriptRunner extends AbstractService { - private static Log LOG = LogFactory.getLog(NodeHealthScriptRunner.class); + private static final Logger LOG = + LoggerFactory.getLogger(NodeHealthScriptRunner.class); /** Absolute path to the health script. 
*/ private String nodeHealthScript; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java index 3fbc9350f17..bd1c0f4a62a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Progress.java @@ -20,10 +20,10 @@ import java.util.ArrayList; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Utility to assist with generation of progress reports. Applications build * a hierarchy of {@link Progress} instances, each modelling a phase of @@ -33,7 +33,7 @@ @InterfaceAudience.LimitedPrivate({"MapReduce"}) @InterfaceStability.Unstable public class Progress { - private static final Log LOG = LogFactory.getLog(Progress.class); + private static final Logger LOG = LoggerFactory.getLogger(Progress.class); private String status = ""; private float progress; private int currentPhase; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java index e3f0077ac27..41907fbed7d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownHookManager.java @@ -18,8 +18,8 @@ package org.apache.hadoop.util; import com.google.common.util.concurrent.ThreadFactoryBuilder; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; 
import java.util.Collections; @@ -47,7 +47,8 @@ public class ShutdownHookManager { private static final ShutdownHookManager MGR = new ShutdownHookManager(); - private static final Log LOG = LogFactory.getLog(ShutdownHookManager.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShutdownHookManager.class); private static final long TIMEOUT_DEFAULT = 10; private static final TimeUnit TIME_UNIT_DEFAULT = TimeUnit.SECONDS; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java index ffd88fb97ac..5405d7756af 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ShutdownThreadsHelper.java @@ -19,8 +19,8 @@ package org.apache.hadoop.util; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; @@ -29,7 +29,8 @@ * Helper class to shutdown {@link Thread}s and {@link ExecutorService}s. 
*/ public class ShutdownThreadsHelper { - private static Log LOG = LogFactory.getLog(ShutdownThreadsHelper.class); + private static final Logger LOG = + LoggerFactory.getLogger(ShutdownThreadsHelper.class); @VisibleForTesting static final int SHUTDOWN_WAIT_MS = 3000; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java index bba16316d62..dde49775e7a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java @@ -32,11 +32,11 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.Shell.ShellCommandExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Plugin to calculate resource information on Linux systems. 
@@ -44,8 +44,7 @@ @InterfaceAudience.Private @InterfaceStability.Evolving public class SysInfoLinux extends SysInfo { - private static final Log LOG = - LogFactory.getLog(SysInfoLinux.class); + private static final Logger LOG = LoggerFactory.getLogger(SysInfoLinux.class); /** * proc's meminfo virtual file has keys-values in the format diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java index e21adac1b20..2007ab32e75 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java @@ -21,11 +21,11 @@ import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.util.Shell.ShellCommandExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Plugin to calculate resource information on Windows systems. 
@@ -34,7 +34,8 @@ @InterfaceStability.Evolving public class SysInfoWindows extends SysInfo { - private static final Log LOG = LogFactory.getLog(SysInfoWindows.class); + private static final Logger LOG = + LoggerFactory.getLogger(SysInfoWindows.class); private long vmemSize; private long memSize; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java index 86f523a9fcb..d23a3dc54b5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ThreadUtil.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.util; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.classification.InterfaceStability; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @InterfaceStability.Evolving public class ThreadUtil { - private static final Log LOG = LogFactory.getLog(ThreadUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(ThreadUtil.class); /** * Cause the current thread to sleep as close as possible to the provided diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java index ca994d6409a..a43ebfa38b4 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java @@ -22,11 +22,11 @@ import java.io.InputStream; import java.util.Properties; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import 
org.apache.hadoop.io.IOUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class returns build information about Hadoop components. @@ -34,7 +34,7 @@ @InterfaceAudience.Public @InterfaceStability.Stable public class VersionInfo { - private static final Log LOG = LogFactory.getLog(VersionInfo.class); + private static final Logger LOG = LoggerFactory.getLogger(VersionInfo.class); private Properties info; @@ -50,7 +50,7 @@ protected VersionInfo(String component) { } info.load(is); } catch (IOException ex) { - LogFactory.getLog(getClass()).warn("Could not read '" + + LoggerFactory.getLogger(getClass()).warn("Could not read '" + versionInfoFile + "', " + ex.toString(), ex); } finally { IOUtils.closeStream(is); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java index d6878670ffe..61eb777d480 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/AsyncGetFuture.java @@ -18,8 +18,8 @@ package org.apache.hadoop.util.concurrent; import com.google.common.util.concurrent.AbstractFuture; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -29,7 +29,8 @@ /** A {@link Future} implemented using an {@link AsyncGet} object. 
*/ public class AsyncGetFuture extends AbstractFuture { - public static final Log LOG = LogFactory.getLog(AsyncGetFuture.class); + public static final Logger LOG = + LoggerFactory.getLogger(AsyncGetFuture.class); private final AtomicBoolean called = new AtomicBoolean(false); private final AsyncGet asyncGet; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java index 3bc9ed9ea92..5f22f93af44 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/ExecutorHelper.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -29,8 +29,8 @@ /** Helper functions for Executors. 
*/ public final class ExecutorHelper { - private static final Log LOG = LogFactory - .getLog(ExecutorHelper.class); + private static final Logger LOG = + LoggerFactory.getLogger(ExecutorHelper.class); static void logThrowableFromAfterExecute(Runnable r, Throwable t) { if (LOG.isDebugEnabled()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java index 8d910b6ccab..78e729b81d3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopScheduledThreadPoolExecutor.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ScheduledThreadPoolExecutor; @@ -32,8 +32,8 @@ public class HadoopScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor { - private static final Log LOG = LogFactory - .getLog(HadoopScheduledThreadPoolExecutor.class); + private static final Logger LOG = LoggerFactory + .getLogger(HadoopScheduledThreadPoolExecutor.class); public HadoopScheduledThreadPoolExecutor(int corePoolSize) { super(corePoolSize); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java index bcf26cb17c7..fa845b75e38 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/concurrent/HadoopThreadPoolExecutor.java @@ -20,8 +20,8 @@ package org.apache.hadoop.util.concurrent; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.concurrent.BlockingQueue; import java.util.concurrent.RejectedExecutionHandler; @@ -34,8 +34,8 @@ * */ public final class HadoopThreadPoolExecutor extends ThreadPoolExecutor { - private static final Log LOG = LogFactory - .getLog(HadoopThreadPoolExecutor.class); + private static final Logger LOG = LoggerFactory + .getLogger(HadoopThreadPoolExecutor.class); public HadoopThreadPoolExecutor(int corePoolSize, int maximumPoolSize, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java index afcf2ac9427..b1964e6f96e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/CLITestHelper.java @@ -18,8 +18,6 @@ package org.apache.hadoop.cli; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.cli.util.*; import org.apache.hadoop.cli.util.CommandExecutor.Result; import org.apache.hadoop.conf.Configuration; @@ -28,6 +26,9 @@ import org.apache.hadoop.util.StringUtils; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; @@ -41,8 +42,8 @@ * Tests for the Command Line Interface (CLI) */ public class CLITestHelper { - private static final Log LOG = - LogFactory.getLog(CLITestHelper.class.getName()); + private static final Logger LOG = 
+ LoggerFactory.getLogger(CLITestHelper.class.getName()); // In this mode, it runs the command and compares the actual output // with the expected output diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java index f9c8c165edd..9183524a1ad 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/CryptoStreamsTestBase.java @@ -26,8 +26,6 @@ import java.util.EnumSet; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.ByteBufferReadable; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSExceptionMessages; @@ -43,9 +41,11 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class CryptoStreamsTestBase { - protected static final Log LOG = LogFactory.getLog( + protected static final Logger LOG = LoggerFactory.getLogger( CryptoStreamsTestBase.class); protected static CryptoCodec codec; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java index 52e547ba405..a85668853fd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java @@ -31,8 +31,6 @@ import java.util.Map; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; 
import org.apache.hadoop.io.DataOutputBuffer; @@ -46,9 +44,12 @@ import org.junit.Test; import com.google.common.primitives.Longs; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestCryptoCodec { - private static final Log LOG= LogFactory.getLog(TestCryptoCodec.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCryptoCodec.class); private static byte[] key = new byte[16]; private static byte[] iv = new byte[16]; private static final int bufferSize = 4096; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java index 2e208d21762..4c943bb6779 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java @@ -32,8 +32,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem.Statistics; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; @@ -41,6 +39,8 @@ import com.google.common.base.Supplier; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** *

@@ -48,7 +48,8 @@ *

*/ public abstract class FCStatisticsBaseTest { - private static final Log LOG = LogFactory.getLog(FCStatisticsBaseTest.class); + private static final Logger LOG = + LoggerFactory.getLogger(FCStatisticsBaseTest.class); static protected int blockSize = 512; static protected int numBlocks = 1; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java index 584ca40a3af..080c515a591 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContext.java @@ -17,15 +17,16 @@ */ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.fail; public class TestFileContext { - private static final Log LOG = LogFactory.getLog(TestFileContext.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFileContext.class); @Test public void testDefaultURIWithoutScheme() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java index dd5279d963c..28710e2c3f4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileStatus.java @@ -31,16 +31,16 @@ import java.util.List; import org.junit.Test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; 
+import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFileStatus { - private static final Log LOG = - LogFactory.getLog(TestFileStatus.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestFileStatus.class); /** Values for creating {@link FileStatus} in some tests */ static final int LENGTH = 1; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java index a1e81ec5473..e156ec67b2d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java @@ -48,8 +48,6 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Shell; @@ -61,9 +59,11 @@ import org.junit.Before; import org.junit.Ignore; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFileUtil { - private static final Log LOG = LogFactory.getLog(TestFileUtil.class); + private static final Logger LOG = LoggerFactory.getLogger(TestFileUtil.class); private static final File TEST_DIR = GenericTestUtils.getTestDir("fu"); private static final String FILE = "x"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java index 9e199ca7f84..3dd325f5307 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java @@ -29,17 +29,17 @@ import java.io.File; import 
java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFsShellCopy { - static final Log LOG = LogFactory.getLog(TestFsShellCopy.class); + static final Logger LOG = LoggerFactory.getLogger(TestFsShellCopy.class); static Configuration conf; static FsShell shell; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java index 46c44912bd0..024a0769fe4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java @@ -32,8 +32,6 @@ import java.util.LinkedList; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.shell.FsCommand; import org.apache.hadoop.fs.shell.PathData; @@ -44,14 +42,16 @@ import org.apache.hadoop.util.Shell; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This test validates that chmod, chown, chgrp returning correct exit codes * */ public class TestFsShellReturnCode { - private static final Log LOG = LogFactory - .getLog("org.apache.hadoop.fs.TestFsShellReturnCode"); + private static final Logger LOG = LoggerFactory + .getLogger("org.apache.hadoop.fs.TestFsShellReturnCode"); private static final Configuration conf = new Configuration(); private static FileSystem fileSys; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java index 89c886ef62c..5fe4e39ade8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellTouch.java @@ -21,17 +21,17 @@ import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertThat; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestFsShellTouch { - static final Log LOG = LogFactory.getLog(TestFsShellTouch.class); + static final Logger LOG = LoggerFactory.getLogger(TestFsShellTouch.class); static FsShell shell; static LocalFileSystem lfs; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java index bacdbb73e45..a1aa4de2c0c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystem.java @@ -18,8 +18,6 @@ package org.apache.hadoop.fs; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; @@ -30,6 +28,8 @@ import org.apache.hadoop.util.Progressable; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.reflect.Method; @@ -48,7 +48,8 @@ @SuppressWarnings("deprecation") public class TestHarFileSystem { - public static final Log LOG = LogFactory.getLog(TestHarFileSystem.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestHarFileSystem.class); /** * FileSystem methods that must not be overwritten by diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java index e7766f3d929..3d202dfd2cf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractBondedFSContract.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs.contract; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URI; @@ -39,8 +39,8 @@ */ public abstract class AbstractBondedFSContract extends AbstractFSContract { - private static final Log LOG = - LogFactory.getLog(AbstractBondedFSContract.class); + private static final Logger LOG = + LoggerFactory.getLogger(AbstractBondedFSContract.class); /** * Pattern for the option for test filesystems from schema diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java index ca017022177..6da51827c8a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/loadGenerator/LoadGenerator.java @@ -32,8 +32,6 @@ import java.util.EnumSet; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CreateFlag; @@ -49,6 +47,8 @@ import org.apache.hadoop.util.ToolRunner; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** The load generator is a tool for testing NameNode behavior under * different client loads. Note there is a subclass of this clas that lets @@ -129,7 +129,7 @@ * -scriptFile : text file to parse for scripted operation */ public class LoadGenerator extends Configured implements Tool { - public static final Log LOG = LogFactory.getLog(LoadGenerator.class); + public static final Logger LOG = LoggerFactory.getLogger(LoadGenerator.class); private volatile static boolean shouldRun = true; protected static Path root = DataGenerator.DEFAULT_ROOT; @@ -341,7 +341,7 @@ private void genFile(Path file, long fileSize) throws IOException { executionTime[WRITE_CLOSE] += (Time.now() - startTime); numOfOps[WRITE_CLOSE]++; } finally { - IOUtils.cleanup(LOG, out); + IOUtils.cleanupWithLogger(LOG, out); } } } @@ -651,7 +651,7 @@ protected static int loadScriptFile(String filename, boolean readLocally) throws System.err.println("Line: " + lineNum + ", " + e.getMessage()); return -1; } finally { - IOUtils.cleanup(LOG, br); + IOUtils.cleanupWithLogger(LOG, br); } // Copy vectors to arrays of values, to avoid autoboxing overhead later diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java index 23750816bd4..764ad2e1e3c 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ActiveStandbyElectorTestUtil.java @@ -19,18 +19,18 @@ import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.test.MultithreadedTestUtil.TestContext; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import org.apache.zookeeper.KeeperException.NoNodeException; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.server.ZooKeeperServer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class ActiveStandbyElectorTestUtil { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( ActiveStandbyElectorTestUtil.class); private static final long LOG_INTERVAL_MS = 500; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index 551da56007e..a15c5d123e2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -23,8 +23,6 @@ import java.util.ArrayList; import com.google.protobuf.BlockingService; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB; @@ -38,6 +36,8 @@ import org.mockito.Mockito; import com.google.common.collect.Lists; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.CommonConfigurationKeys.HA_HM_RPC_TIMEOUT_DEFAULT; @@ -46,7 +46,8 @@ * a mock 
implementation. */ class DummyHAService extends HAServiceTarget { - public static final Log LOG = LogFactory.getLog(DummyHAService.class); + public static final Logger LOG = + LoggerFactory.getLogger(DummyHAService.class); private static final String DUMMY_FENCE_KEY = "dummy.fence.key"; volatile HAServiceState state; HAServiceProtocol proxy, healthMonitorProxy; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java index 04f65764016..0967c1aff55 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java @@ -23,8 +23,6 @@ import java.io.IOException; import java.net.InetSocketAddress; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -39,6 +37,8 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Ints; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Harness for starting two dummy ZK FailoverControllers, associated with @@ -55,7 +55,8 @@ public class MiniZKFCCluster { private DummySharedResource sharedResource = new DummySharedResource(); - private static final Log LOG = LogFactory.getLog(MiniZKFCCluster.class); + private static final Logger LOG = + LoggerFactory.getLogger(MiniZKFCCluster.class); public MiniZKFCCluster(Configuration conf, ZooKeeperServer zks) { this.conf = conf; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java index 1d8f48e2d02..0e59aa10046 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java @@ -24,8 +24,6 @@ import java.io.PrintStream; import java.net.InetSocketAddress; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -34,9 +32,11 @@ import com.google.common.base.Charsets; import com.google.common.base.Joiner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestHAAdmin { - private static final Log LOG = LogFactory.getLog(TestHAAdmin.class); + private static final Logger LOG = LoggerFactory.getLogger(TestHAAdmin.class); private HAAdmin tool; private ByteArrayOutputStream errOutBytes = new ByteArrayOutputStream(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java index 6c465437796..8738372fc4b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHealthMonitor.java @@ -23,8 +23,6 @@ import java.net.InetSocketAddress; import java.util.concurrent.atomic.AtomicInteger; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; @@ -34,9 +32,11 @@ import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestHealthMonitor { - private static final Log LOG = LogFactory.getLog( + private static final Logger LOG = LoggerFactory.getLogger( TestHealthMonitor.class); /** How many times has 
createProxy been called */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java index 0e4a1caeef9..70361752633 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestGlobalFilter.java @@ -33,14 +33,14 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGlobalFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java index b8c5e117af1..27d6b07e194 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServer.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.http; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration.IntegerRanges; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -40,6 +38,8 @@ import org.mockito.internal.util.reflection.Whitebox; import org.mortbay.jetty.Connector; import 
org.mortbay.util.ajax.JSON; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.servlet.Filter; import javax.servlet.FilterChain; @@ -72,7 +72,7 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; public class TestHttpServer extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(TestHttpServer.class); + static final Logger LOG = LoggerFactory.getLogger(TestHttpServer.class); private static HttpServer2 server; private static final int MAX_THREADS = 10; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java index d72a958a0bb..afd06acfca4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerLogs.java @@ -18,8 +18,6 @@ package org.apache.hadoop.http; import org.apache.http.HttpStatus; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.http.resource.JerseyResource; @@ -27,12 +25,14 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.net.HttpURLConnection; import java.net.URL; public class TestHttpServerLogs extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(TestHttpServerLogs.class); + static final Logger LOG = LoggerFactory.getLogger(TestHttpServerLogs.class); private static HttpServer2 server; @BeforeClass diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java index c92944e57ff..07dbc2a7c6e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWebapps.java @@ -19,8 +19,8 @@ import org.junit.Test; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.FileNotFoundException; @@ -28,7 +28,8 @@ * Test webapp loading */ public class TestHttpServerWebapps extends HttpServerFunctionalTest { - private static final Log log = LogFactory.getLog(TestHttpServerWebapps.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestHttpServerWebapps.class); /** * Test that the test server is loadable on the classpath @@ -58,7 +59,7 @@ public void testMissingServerResource() throws Throwable { stop(server); fail("Expected an exception, got " + serverDescription); } catch (FileNotFoundException expected) { - log.debug("Expected exception " + expected, expected); + LOG.debug("Expected exception " + expected, expected); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java index cbdda907d02..908ccd01aaa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpengo.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.http; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.minikdc.MiniKdc; @@ -38,6 
+36,8 @@ import org.junit.BeforeClass; import org.junit.Test; import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileWriter; @@ -53,7 +53,8 @@ */ public class TestHttpServerWithSpengo { - static final Log LOG = LogFactory.getLog(TestHttpServerWithSpengo.class); + static final Logger LOG = + LoggerFactory.getLogger(TestHttpServerWithSpengo.class); private static final String SECRET_STR = "secret"; private static final String HTTP_USER = "HTTP"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java index 09f31dff7b3..4c35b391c39 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestPathFilter.java @@ -33,14 +33,14 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestPathFilter extends HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static final Set RECORDS = new TreeSet(); /** A very simple filter that records accessed uri's */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java index f52a0554417..3c68986dac5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java @@ -32,8 +32,6 @@ import javax.net.ssl.SSLSocket; import javax.net.ssl.SSLSocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.io.IOUtils; @@ -45,6 +43,8 @@ import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This testcase issues SSL certificates configures the HttpServer to serve @@ -56,7 +56,8 @@ public class TestSSLHttpServer extends HttpServerFunctionalTest { private static final String BASEDIR = GenericTestUtils.getTempPath(TestSSLHttpServer.class.getSimpleName()); - private static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSSLHttpServer.class); private static Configuration conf; private static HttpServer2 server; private static String keystoresDir; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java index 6b17ccc84e2..b5136ca2c1c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestServletFilter.java @@ -32,15 +32,15 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestServletFilter extends 
HttpServerFunctionalTest { - static final Log LOG = LogFactory.getLog(HttpServer2.class); + static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); static volatile String uri = null; /** A very simple filter which record the uri filtered. */ diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java index f1313e26ca9..2203a040181 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/resource/JerseyResource.java @@ -30,9 +30,9 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.mortbay.util.ajax.JSON; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple Jersey resource class TestHttpServer. 
@@ -41,7 +41,7 @@ */ @Path("") public class JerseyResource { - static final Log LOG = LogFactory.getLog(JerseyResource.class); + static final Logger LOG = LoggerFactory.getLogger(JerseyResource.class); public static final String PATH = "path"; public static final String OP = "op"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java index 505aca736c2..722e9de5958 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java @@ -21,13 +21,15 @@ import java.io.*; -import org.apache.commons.logging.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.conf.*; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertEquals; @@ -37,7 +39,8 @@ /** Support for flat files of binary key/value pairs. 
*/ public class TestArrayFile { - private static final Log LOG = LogFactory.getLog(TestArrayFile.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestArrayFile.class); private static final Path TEST_DIR = new Path(GenericTestUtils.getTempPath( TestMapFile.class.getSimpleName())); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java index bd8f2ef537f..b70e011f6aa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java @@ -21,16 +21,18 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertEquals; public class TestDefaultStringifier { private static Configuration conf = new Configuration(); - private static final Log LOG = LogFactory.getLog(TestDefaultStringifier.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestDefaultStringifier.class); private char[] alphabet = "abcdefghijklmnopqrstuvwxyz".toCharArray(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java index e97ab6a5594..044824356ed 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java @@ -21,8 +21,6 @@ import java.io.*; import java.util.*; -import org.apache.commons.logging.*; - import 
org.apache.hadoop.fs.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile.Metadata; @@ -40,11 +38,14 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Support for flat files of binary key/value pairs. */ public class TestSequenceFile { - private static final Log LOG = LogFactory.getLog(TestSequenceFile.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestSequenceFile.class); private Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java index 1fcfab673c5..b6ec4874583 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java @@ -21,14 +21,13 @@ import java.io.*; import java.util.*; - -import org.apache.commons.logging.*; - import org.apache.hadoop.fs.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; @@ -37,7 +36,7 @@ /** Support for flat files of binary key/value pairs. 
*/ public class TestSetFile { - private static final Log LOG = LogFactory.getLog(TestSetFile.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSetFile.class); private static String FILE = GenericTestUtils.getTempPath("test.set"); private static Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java index 92fb4ec94bc..57359a0b86c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java @@ -20,15 +20,16 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; public class TestWritableUtils { - private static final Log LOG = LogFactory.getLog(TestWritableUtils.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestWritableUtils.class); private void testValue(int val, int vintlen) throws IOException { DataOutputBuffer buf = new DataOutputBuffer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java index 9d16a0de57a..4b99def8638 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java @@ -68,18 +68,19 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.logging.Log; -import 
org.apache.commons.logging.LogFactory; import org.junit.Assert; import org.junit.Assume; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.*; import static org.junit.Assume.assumeTrue; public class TestCodec { - private static final Log LOG= LogFactory.getLog(TestCodec.class); + private static final Logger LOG= LoggerFactory.getLogger(TestCodec.class); private Configuration conf = new Configuration(); private int count = 10000; @@ -373,7 +374,7 @@ private static Path writeSplitTestFile(FileSystem fs, Random rand, } LOG.info("Wrote " + seq + " records to " + file); } finally { - IOUtils.cleanup(LOG, fout); + IOUtils.cleanupWithLogger(LOG, fout); CodecPool.returnCompressor(cmp); } return file; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java index 7b55cac2939..645080b078d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java @@ -24,8 +24,6 @@ import java.io.IOException; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; @@ -36,12 +34,15 @@ import org.apache.hadoop.util.ReflectionUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.assertTrue; import static org.junit.Assume.assumeTrue; public class TestCompressionStreamReuse { - private static final Log LOG = LogFactory - .getLog(TestCompressionStreamReuse.class); + private static final Logger LOG = LoggerFactory + 
.getLogger(TestCompressionStreamReuse.class); private Configuration conf = new Configuration(); private int count = 10000; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index 0f2c640b1ab..a3901a75409 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -44,8 +44,6 @@ import static org.junit.Assert.*; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileSystem; @@ -54,12 +52,14 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.*; import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat.*; public class TestNativeIO { - static final Log LOG = LogFactory.getLog(TestNativeIO.class); + static final Logger LOG = LoggerFactory.getLogger(TestNativeIO.class); static final File TEST_DIR = GenericTestUtils.getTestDir("testnativeio"); @@ -641,8 +641,8 @@ public void testCopyFileUnbuffered() throws Exception { NativeIO.copyFileUnbuffered(srcFile, dstFile); Assert.assertEquals(srcFile.length(), dstFile.length()); } finally { - IOUtils.cleanup(LOG, channel); - IOUtils.cleanup(LOG, raSrcFile); + IOUtils.cleanupWithLogger(LOG, channel); + IOUtils.cleanupWithLogger(LOG, raSrcFile); FileUtils.deleteQuietly(TEST_DIR); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java index 64abecdbf31..fbe3fb8118e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java @@ -27,14 +27,15 @@ import org.junit.Before; import org.junit.Test; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestSharedFileDescriptorFactory { - static final Log LOG = LogFactory.getLog(TestSharedFileDescriptorFactory.class); + static final Logger LOG = + LoggerFactory.getLogger(TestSharedFileDescriptorFactory.class); private static final File TEST_BASE = GenericTestUtils.getTestDir(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java index 8fc852a7157..2fe1271c003 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestAsyncIPC.java @@ -18,8 +18,6 @@ package org.apache.hadoop.ipc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.LongWritable; @@ -34,6 +32,8 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.InetSocketAddress; @@ -49,7 
+49,7 @@ public class TestAsyncIPC { private static Configuration conf; - private static final Log LOG = LogFactory.getLog(TestAsyncIPC.class); + private static final Logger LOG = LoggerFactory.getLogger(TestAsyncIPC.class); static AsyncGetFuture getAsyncRpcResponseFuture() { @@ -185,7 +185,7 @@ public void run() { final long param = TestIPC.RANDOM.nextLong(); runCall(i, param); } catch (Exception e) { - LOG.fatal(String.format("Caller-%d Call-%d caught: %s", callerId, i, + LOG.error(String.format("Caller-%d Call-%d caught: %s", callerId, i, StringUtils.stringifyException(e))); failed = true; } @@ -222,7 +222,7 @@ private void waitForReturnValues(final int start, final int end) for (int i = start; i < end; i++) { LongWritable value = returnFutures.get(i).get(); if (expectedValues.get(i) != value.get()) { - LOG.fatal(String.format("Caller-%d Call-%d failed!", callerId, i)); + LOG.error(String.format("Caller-%d Call-%d failed!", callerId, i)); failed = true; break; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index 63af6fc6f5a..2dce0477e78 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -58,8 +58,6 @@ import javax.net.SocketFactory; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -96,12 +94,13 @@ import com.google.common.base.Supplier; import com.google.common.primitives.Bytes; import com.google.common.primitives.Ints; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; /** Unit tests for IPC. 
*/ public class TestIPC { - public static final Log LOG = - LogFactory.getLog(TestIPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestIPC.class); private static Configuration conf; final static int PING_INTERVAL = 1000; @@ -230,12 +229,12 @@ public void run() { final long param = RANDOM.nextLong(); LongWritable value = call(client, param, server, conf); if (value.get() != param) { - LOG.fatal("Call failed!"); + LOG.error("Call failed!"); failed = true; break; } } catch (Exception e) { - LOG.fatal("Caught: " + StringUtils.stringifyException(e)); + LOG.error("Caught: " + StringUtils.stringifyException(e)); failed = true; } } @@ -784,7 +783,7 @@ public void run() { call(client, new LongWritable(Thread.currentThread().getId()), addr, 60000, conf); } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); failures.incrementAndGet(); return; } finally { @@ -895,7 +894,7 @@ public void run() { callBarrier.await(); } } catch (Throwable t) { - LOG.error(t); + LOG.error(t.toString()); error.set(true); } } @@ -917,7 +916,7 @@ public void run() { callReturned.countDown(); Thread.sleep(10000); } catch (IOException e) { - LOG.error(e); + LOG.error(e.toString()); } catch (InterruptedException e) { } finally { client.stop(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java index 546cb8f472e..ea55f3367ec 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPCServerResponder.java @@ -32,8 +32,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.io.BytesWritable; @@ -45,6 +43,8 @@ import org.apache.hadoop.net.NetUtils; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This test provokes partial writes in the server, which is @@ -52,8 +52,8 @@ */ public class TestIPCServerResponder { - public static final Log LOG = - LogFactory.getLog(TestIPCServerResponder.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestIPCServerResponder.class); private static Configuration conf = new Configuration(); @@ -126,7 +126,7 @@ public void run() { call(client, param, address); Thread.sleep(RANDOM.nextInt(20)); } catch (Exception e) { - LOG.fatal("Caught Exception", e); + LOG.error("Caught Exception", e); failed = true; } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java index f5fefe48d05..476b1979b2a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java @@ -29,18 +29,18 @@ import com.google.protobuf.BlockingService; import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.protobuf.TestProtos; import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcHandoffProto; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestProtoBufRpcServerHandoff { - public static final Log LOG = - LogFactory.getLog(TestProtoBufRpcServerHandoff.class); + public static final 
Logger LOG = + LoggerFactory.getLogger(TestProtoBufRpcServerHandoff.class); @Test(timeout = 20000) public void test() throws Exception { @@ -83,8 +83,8 @@ public void test() throws Exception { ClientInvocationCallable callable1 = future1.get(); ClientInvocationCallable callable2 = future2.get(); - LOG.info(callable1); - LOG.info(callable2); + LOG.info(callable1.toString()); + LOG.info(callable2.toString()); // Ensure the 5 second sleep responses are within a reasonable time of each // other. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java index 9ca662d5d31..d08c4b4c33a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java @@ -20,8 +20,6 @@ import com.google.common.base.Supplier; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -54,6 +52,8 @@ import org.junit.Test; import org.mockito.Mockito; import org.mockito.internal.util.reflection.Whitebox; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import javax.net.SocketFactory; @@ -104,7 +104,7 @@ @SuppressWarnings("deprecation") public class TestRPC extends TestRpcBase { - public static final Log LOG = LogFactory.getLog(TestRPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestRPC.class); @Before public void setup() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index 2ac2be990d5..2984efdd9b5 
100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -28,8 +28,6 @@ import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto; import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto; @@ -39,6 +37,8 @@ import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Unit test for supporting method-name based compatible RPCs. */ public class TestRPCCompatibility { @@ -47,8 +47,8 @@ public class TestRPCCompatibility { private static RPC.Server server; private ProtocolProxy proxy; - public static final Log LOG = - LogFactory.getLog(TestRPCCompatibility.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRPCCompatibility.class); private static Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java index 93af7d4aad8..aee88935383 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCServerShutdown.java @@ -19,11 +19,11 @@ package org.apache.hadoop.ipc; import com.google.protobuf.ServiceException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import 
java.io.IOException; import java.util.ArrayList; @@ -42,7 +42,8 @@ @SuppressWarnings("deprecation") public class TestRPCServerShutdown extends TestRpcBase { - public static final Log LOG = LogFactory.getLog(TestRPCServerShutdown.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRPCServerShutdown.class); @Before public void setup() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java index 3716bc3667e..2e0b3daa220 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcServerHandoff.java @@ -30,19 +30,19 @@ import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.net.NetUtils; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestRpcServerHandoff { - public static final Log LOG = - LogFactory.getLog(TestRpcServerHandoff.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestRpcServerHandoff.class); private static final String BIND_ADDRESS = "0.0.0.0"; private static final Configuration conf = new Configuration(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 87210e8d76a..8e9aa5ca251 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -20,8 +20,6 @@ import com.google.protobuf.ServiceException; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; @@ -43,6 +41,8 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; import javax.security.auth.callback.*; @@ -95,8 +95,7 @@ public TestSaslRPC(QualityOfProtection[] qop, this.saslPropertiesResolver = saslPropertiesResolver; } - public static final Log LOG = - LogFactory.getLog(TestSaslRPC.class); + public static final Logger LOG = LoggerFactory.getLogger(TestSaslRPC.class); static final String ERROR_MESSAGE = "Token is invalid"; static final String SERVER_KEYTAB_KEY = "test.ipc.server.keytab"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java index afda5355da6..31415277ebc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestServer.java @@ -27,12 +27,12 @@ import java.net.InetSocketAddress; import java.net.ServerSocket; -import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.Server.Call; import org.junit.Test; +import org.slf4j.Logger; /** * This is intended to be a set of unit tests for the @@ -136,7 +136,7 @@ static class TestException3 extends Exception { public void 
testLogExceptions() throws Exception { final Configuration conf = new Configuration(); final Call dummyCall = new Call(0, 0, null, null); - Log logger = mock(Log.class); + Logger logger = mock(Logger.class); Server server = new Server("0.0.0.0", 0, LongWritable.class, 1, conf) { @Override public Writable call( @@ -154,12 +154,12 @@ public Writable call( // No stack trace should be logged for a terse exception. server.logException(logger, new TestException2(), dummyCall); - verify(logger, times(1)).info(anyObject()); + verify(logger, times(1)).info(anyString()); // Full stack trace should be logged for other exceptions. final Throwable te3 = new TestException3(); server.logException(logger, te3, dummyCall); - verify(logger, times(1)).info(anyObject(), eq(te3)); + verify(logger, times(1)).info(anyString(), eq(te3)); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java index 642817617e5..d4bc06ad3c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestWeightedRoundRobinMultiplexer.java @@ -21,14 +21,15 @@ import static org.junit.Assert.assertEquals; import org.junit.Test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.ipc.WeightedRoundRobinMultiplexer.IPC_CALLQUEUE_WRRMUX_WEIGHTS_KEY; public class TestWeightedRoundRobinMultiplexer { - public static final Log LOG = LogFactory.getLog(TestWeightedRoundRobinMultiplexer.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestWeightedRoundRobinMultiplexer.class); private 
WeightedRoundRobinMultiplexer mux; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java index 7e094edc097..7bc772f062a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java @@ -31,8 +31,6 @@ import java.util.List; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsTag; @@ -47,9 +45,12 @@ import org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31; import org.apache.hadoop.metrics2.sink.ganglia.GangliaMetricsTestHelper; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGangliaMetrics { - public static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestMetricsSystemImpl.class); // This is the prefix to locate the config file for this particular test // This is to avoid using the same config file with other test cases, // which can cause race conditions. 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java index 3bdc8af1e61..18b8309d4bf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsConfig.java @@ -24,15 +24,16 @@ import static org.junit.Assert.*; import org.apache.commons.configuration.Configuration; -import org.apache.commons.logging.LogFactory; -import org.apache.commons.logging.Log; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.impl.ConfigUtil.*; /** * Test metrics configuration */ public class TestMetricsConfig { - static final Log LOG = LogFactory.getLog(TestMetricsConfig.class); + static final Logger LOG = LoggerFactory.getLogger(TestMetricsConfig.class); /** * Common use cases diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java index 0463f48c5d5..5cc7697d1c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestMetricsSystemImpl.java @@ -42,8 +42,6 @@ import com.google.common.collect.Iterables; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsException; import static org.apache.hadoop.test.MoreAsserts.*; @@ -61,13 +59,16 @@ import org.apache.hadoop.metrics2.lib.MutableGaugeLong; import org.apache.hadoop.util.StringUtils; import 
org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test the MetricsSystemImpl class */ @RunWith(MockitoJUnitRunner.class) public class TestMetricsSystemImpl { - private static final Log LOG = LogFactory.getLog(TestMetricsSystemImpl.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMetricsSystemImpl.class); static { DefaultMetricsSystem.setMiniClusterMode(true); } @@ -78,7 +79,7 @@ public class TestMetricsSystemImpl { public static class TestSink implements MetricsSink { @Override public void putMetrics(MetricsRecord record) { - LOG.debug(record); + LOG.debug(record.toString()); } @Override public void flush() {} @@ -420,7 +421,7 @@ public void flush() { } private void checkMetricsRecords(List recs) { - LOG.debug(recs); + LOG.debug(recs.toString()); MetricsRecord r = recs.get(0); assertEquals("name", "s1rec", r.name()); assertEquals("tags", new MetricsTag[] { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java index 7da8d1b8c63..719130f5ba9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestSinkQueue.java @@ -22,18 +22,20 @@ import java.util.concurrent.CountDownLatch; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.*; import static org.mockito.Mockito.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import static org.apache.hadoop.metrics2.impl.SinkQueue.*; /** * Test the half-blocking metrics sink queue */ public class TestSinkQueue { - private static final Log LOG = LogFactory.getLog(TestSinkQueue.class); + private static final Logger LOG = + 
LoggerFactory.getLogger(TestSinkQueue.class); /** * Test common use case @@ -234,7 +236,7 @@ private void shouldThrowCME(Fun callback) throws Exception { callback.run(); } catch (ConcurrentModificationException e) { - LOG.info(e); + LOG.info(e.toString()); return; } LOG.error("should've thrown CME"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java index fd716ae7116..b0d7debe44a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/lib/TestMutableMetrics.java @@ -34,18 +34,19 @@ import java.util.Random; import java.util.concurrent.CountDownLatch; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.util.Quantile; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Test metrics record builder interface and mutable metrics */ public class TestMutableMetrics { - private static final Log LOG = LogFactory.getLog(TestMutableMetrics.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMutableMetrics.class); private final double EPSILON = 1e-42; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java index 7bee3a2008d..e69947ecdc2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMetricsCache.java @@ -25,15 +25,17 @@ import static org.junit.Assert.*; 
import static org.mockito.Mockito.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.MetricsRecord; import org.apache.hadoop.metrics2.MetricsTag; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.apache.hadoop.metrics2.lib.Interns.*; public class TestMetricsCache { - private static final Log LOG = LogFactory.getLog(TestMetricsCache.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestMetricsCache.class); @SuppressWarnings("deprecation") @Test public void testUpdate() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java index a294e745ee0..80f2ebc98ce 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/ServerSocketUtil.java @@ -18,16 +18,17 @@ package org.apache.hadoop.net; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.net.ServerSocket; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - public class ServerSocketUtil { - private static final Log LOG = LogFactory.getLog(ServerSocketUtil.class); + private static final Logger LOG = + LoggerFactory.getLogger(ServerSocketUtil.class); private static Random rand = new Random(); /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java index a0bfe73f9ae..663198a4dbe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java @@ -28,12 +28,12 @@ import javax.naming.CommunicationException; import javax.naming.NameNotFoundException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Time; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.core.Is.is; @@ -45,7 +45,7 @@ */ public class TestDNS { - private static final Log LOG = LogFactory.getLog(TestDNS.class); + private static final Logger LOG = LoggerFactory.getLogger(TestDNS.class); private static final String DEFAULT = "default"; // This is not a legal hostname (starts with a hyphen). It will never diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java index e59ac779828..1c56c6017a4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java @@ -40,8 +40,6 @@ import junit.framework.AssertionFailedError; import org.apache.commons.lang.StringUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.NetUtilsTestResolver; @@ -49,10 +47,12 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestNetUtils { - private static final Log LOG = LogFactory.getLog(TestNetUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestNetUtils.class); private static final int DEST_PORT = 4040; private static final String 
DEST_PORT_NAME = Integer.toString(DEST_PORT); private static final int LOCAL_PORT = 8080; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java index 649ba1264b7..f1c03cf5df4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestSocketIOWithTimeout.java @@ -25,8 +25,6 @@ import java.nio.channels.Pipe; import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.MultithreadedTestUtil; import org.apache.hadoop.test.MultithreadedTestUtil.TestContext; @@ -36,6 +34,9 @@ import org.apache.hadoop.io.nativeio.NativeIO; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import static org.junit.Assert.*; /** @@ -47,7 +48,8 @@ */ public class TestSocketIOWithTimeout { - static Log LOG = LogFactory.getLog(TestSocketIOWithTimeout.class); + static final Logger LOG = + LoggerFactory.getLogger(TestSocketIOWithTimeout.class); private static int TIMEOUT = 1*1000; private static String TEST_STRING = "1234567890"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java index f3c0a5cb255..a906c4aa856 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestStaticMapping.java @@ -18,12 +18,12 @@ package org.apache.hadoop.net; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; @@ -34,7 +34,8 @@ * Because the map is actually static, this map needs to be reset for every test */ public class TestStaticMapping extends Assert { - private static final Log LOG = LogFactory.getLog(TestStaticMapping.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestStaticMapping.class); /** * Reset the map then create a new instance of the {@link StaticMapping} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java index 8a5a0a42250..28b3cbe3faf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocket.java @@ -203,7 +203,7 @@ public Void call() { } catch (IOException e) { throw new RuntimeException("unexpected IOException", e); } finally { - IOUtils.cleanup(DomainSocket.LOG, serverConn); + IOUtils.cleanupWithLogger(DomainSocket.LOG, serverConn); } return null; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java index 4cc86a7a421..aa522f26601 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/unix/TestDomainSocketWatcher.java @@ -26,17 +26,18 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.ReentrantLock; -import 
org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Test; import com.google.common.util.concurrent.Uninterruptibles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestDomainSocketWatcher { - static final Log LOG = LogFactory.getLog(TestDomainSocketWatcher.class); + static final Logger LOG = + LoggerFactory.getLogger(TestDomainSocketWatcher.class); private Throwable trappedException = null; @@ -141,7 +142,7 @@ public boolean handle(DomainSocket sock) { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -169,7 +170,7 @@ public void run() { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -212,7 +213,7 @@ public boolean handle(DomainSocket sock) { TimeUnit.MILLISECONDS.sleep(1); } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } @@ -241,7 +242,7 @@ public void run() { } } } catch (Throwable e) { - LOG.error(e); + LOG.error(e.toString()); throw new RuntimeException(e); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java index 79f56e065a9..0a2d42c2732 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestCompositeGroupMapping.java @@ -24,16 +24,17 @@ import java.util.Arrays; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import 
org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestCompositeGroupMapping { - public static final Log LOG = LogFactory.getLog(TestCompositeGroupMapping.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestCompositeGroupMapping.class); private static Configuration conf = new Configuration(); private static class TestUser { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index 50d389c6465..8cfd9fc0ebc 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -44,8 +44,9 @@ import org.apache.hadoop.ipc.TestRpcBase.TestTokenSecretManager; import org.apache.hadoop.ipc.TestRpcBase.TestTokenIdentifier; import org.apache.hadoop.ipc.TestRpcBase.TestTokenSelector; -import org.apache.commons.logging.*; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * @@ -63,8 +64,8 @@ public class TestDoAsEffectiveUser { private static final Configuration masterConf = new Configuration(); - public static final Log LOG = LogFactory - .getLog(TestDoAsEffectiveUser.class); + public static final Logger LOG = LoggerFactory + .getLogger(TestDoAsEffectiveUser.class); static { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java index 85f17b16392..3ef36984951 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupFallback.java @@ -21,16 +21,17 @@ import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.slf4j.event.Level; public class TestGroupFallback { - public static final Log LOG = LogFactory.getLog(TestGroupFallback.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestGroupFallback.class); @Test public void testGroupShell() throws Exception { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java index d51d63bf590..f015021a118 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestGroupsCaching.java @@ -40,17 +40,17 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.ShellBasedUnixGroupsMapping; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestGroupsCaching { - public static final Log TESTLOG = LogFactory.getLog(TestGroupsCaching.class); + public static final Logger TESTLOG = + LoggerFactory.getLogger(TestGroupsCaching.class); private static String[] myGroups = {"grp1", "grp2"}; private Configuration conf; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java index 6d9ea0853f5..d3c95386412 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedUnixGroupsMapping.java @@ -20,8 +20,6 @@ import java.io.IOException; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.test.GenericTestUtils; @@ -30,6 +28,8 @@ import org.apache.hadoop.util.Shell.ExitCodeException; import org.apache.hadoop.util.Shell.ShellCommandExecutor; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; import static org.mockito.Mockito.doNothing; @@ -38,8 +38,8 @@ import static org.mockito.Mockito.when; public class TestShellBasedUnixGroupsMapping { - private static final Log TESTLOG = - LogFactory.getLog(TestShellBasedUnixGroupsMapping.class); + private static final Logger TESTLOG = + LoggerFactory.getLogger(TestShellBasedUnixGroupsMapping.class); private final GenericTestUtils.LogCapturer shellMappingLog = GenericTestUtils.LogCapturer.captureLogs( diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java index 2c866ae4004..ee7e42cb1da 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java @@ -23,8 +23,6 @@ import java.util.List; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; @@ -39,13 +37,16 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TestCredentialProviderFactory { - public static final Log LOG = LogFactory.getLog(TestCredentialProviderFactory.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestCredentialProviderFactory.class); @Rule public final TestName test = new TestName(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java index 04efc3306e4..fe00949e3c8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestAccessControlList.java @@ -26,8 +26,6 @@ import java.util.Iterator; import java.util.List; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -36,6 +34,8 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.NativeCodeLoader; import org.junit.Test; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; @@ -45,8 +45,8 @@ @InterfaceStability.Evolving public class TestAccessControlList { - private static final Log LOG = - LogFactory.getLog(TestAccessControlList.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestAccessControlList.class); /** * Test the netgroups (groups in ACL rules that start with @) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java index 577f11b9296..9061fe752c8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java @@ -25,8 +25,6 @@ import java.util.Arrays; import java.util.Collection; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.Groups; @@ -34,11 +32,13 @@ import org.apache.hadoop.util.NativeCodeLoader; import org.apache.hadoop.util.StringUtils; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestProxyUsers { - private static final Log LOG = - LogFactory.getLog(TestProxyUsers.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestProxyUsers.class); private static final String REAL_USER_NAME = "proxier"; private static final String PROXY_USER_NAME = "proxied_user"; private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java index b41ff152510..ad12f0babac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestDelegationToken.java @@ -32,8 +32,6 @@ import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.Text; @@ -49,11 +47,14 @@ import org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.Time; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; public class TestDelegationToken { - private static final Log LOG = LogFactory.getLog(TestDelegationToken.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestDelegationToken.class); private static final Text KIND = new Text("MY KIND"); public static class TestDelegationTokenIdentifier diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java index 6189c0ed19f..ad3dfcf0c5d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestCompositeService.java @@ -18,12 +18,12 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.Service.STATE; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; 
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -36,7 +36,8 @@ public class TestCompositeService { private static final int FAILED_SERVICE_SEQ_NUMBER = 2; - private static final Log LOG = LogFactory.getLog(TestCompositeService.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestCompositeService.class); /** * flag to state policy of CompositeService, and hence diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java index cf9ca325249..f72e130d750 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java @@ -19,8 +19,6 @@ package org.apache.hadoop.service; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.service.LoggingStateChangeListener; @@ -28,9 +26,12 @@ import org.apache.hadoop.service.ServiceStateChangeListener; import org.apache.hadoop.service.ServiceStateException; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestServiceLifecycle extends ServiceAssert { - private static Log LOG = LogFactory.getLog(TestServiceLifecycle.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestServiceLifecycle.class); /** * Walk the {@link BreakableService} through it's lifecycle, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java index d84c9c8f294..21d38be0d36 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MetricsAsserts.java @@ -32,8 +32,6 @@ import org.mockito.ArgumentCaptor; import org.mockito.ArgumentMatcher; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsSource; @@ -42,6 +40,8 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.MutableQuantiles; import org.apache.hadoop.metrics2.util.Quantile; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.metrics2.lib.Interns.*; @@ -50,7 +50,7 @@ */ public class MetricsAsserts { - final static Log LOG = LogFactory.getLog(MetricsAsserts.class); + final static Logger LOG = LoggerFactory.getLogger(MetricsAsserts.class); private static final double EPSILON = 0.00001; public static MetricsSystem mockMetricsSystem() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java index e0bc1368b83..514e9c0df60 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/MultithreadedTestUtil.java @@ -20,9 +20,9 @@ import java.util.HashSet; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Time; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A utility to easily test threaded/synchronized code. 
@@ -60,8 +60,8 @@ */ public abstract class MultithreadedTestUtil { - public static final Log LOG = - LogFactory.getLog(MultithreadedTestUtil.class); + public static final Logger LOG = + LoggerFactory.getLogger(MultithreadedTestUtil.class); /** * TestContext is used to setup the multithreaded test runner. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java index d6ae04d71ba..4c0b965a973 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TestJUnitSetup.java @@ -18,13 +18,14 @@ package org.apache.hadoop.test; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestJUnitSetup { - public static final Log LOG = LogFactory.getLog(TestJUnitSetup.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestJUnitSetup.class); @Test public void testJavaAssert() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java index 075ef69fd37..58935f24e8e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestAsyncDiskService.java @@ -19,17 +19,18 @@ import junit.framework.TestCase; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.AsyncDiskService; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A test for AsyncDiskService. 
*/ public class TestAsyncDiskService extends TestCase { - public static final Log LOG = LogFactory.getLog(TestAsyncDiskService.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestAsyncDiskService.class); // Access by multiple threads from the ThreadPools in AsyncDiskService. volatile int count; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java index a38c3d764af..e6a6cb18b16 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java @@ -28,21 +28,22 @@ import java.util.jar.JarFile; import java.util.jar.Manifest; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests covering the classpath command-line utility. 
*/ public class TestClasspath { - private static final Log LOG = LogFactory.getLog(TestClasspath.class); + private static final Logger LOG = + LoggerFactory.getLogger(TestClasspath.class); private static final File TEST_DIR = GenericTestUtils.getTestDir( "TestClasspath"); private static final Charset UTF8 = Charset.forName("UTF-8"); @@ -75,7 +76,7 @@ public void setUp() { public void tearDown() { System.setOut(oldStdout); System.setErr(oldStderr); - IOUtils.cleanup(LOG, printStdout, printStderr); + IOUtils.cleanupWithLogger(LOG, printStdout, printStderr); assertTrue(FileUtil.fullyDelete(TEST_DIR)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java index bd748552980..2c27b762c45 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestIdentityHashStore.java @@ -23,14 +23,15 @@ import org.junit.Assert; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.IdentityHashStore; import org.apache.hadoop.util.IdentityHashStore.Visitor; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestIdentityHashStore { - private static final Log LOG = LogFactory.getLog(TestIdentityHashStore.class.getName()); + private static final Logger LOG = + LoggerFactory.getLogger(TestIdentityHashStore.class.getName()); private static class Key { private final String name; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java index 671dd37cf47..3751253062c 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightGSet.java @@ -21,15 +21,16 @@ import java.util.Iterator; import java.util.Random; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.LightWeightGSet.LinkedElement; import org.junit.Assert; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Testing {@link LightWeightGSet} */ public class TestLightWeightGSet { - public static final Log LOG = LogFactory.getLog(TestLightWeightGSet.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestLightWeightGSet.class); private static ArrayList getRandomList(int length, int randomSeed) { Random random = new Random(randomSeed); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java index 32500922a3e..19f213d31a1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestLightWeightResizableGSet.java @@ -23,15 +23,16 @@ import java.util.Random; import java.util.Set; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.Assert.*; /** Testing {@link LightWeightResizableGSet} */ public class TestLightWeightResizableGSet { - public static final Log LOG = LogFactory.getLog(TestLightWeightResizableGSet.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestLightWeightResizableGSet.class); private Random random = new Random(); private TestElement[] 
generateElements(int length) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java index 473c17738eb..58874fdcdfb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestNativeCodeLoader.java @@ -20,16 +20,16 @@ import org.junit.Test; import static org.junit.Assert.*; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.crypto.OpensslCipher; import org.apache.hadoop.io.compress.Lz4Codec; import org.apache.hadoop.io.compress.SnappyCodec; import org.apache.hadoop.io.compress.zlib.ZlibFactory; import org.apache.hadoop.util.NativeCodeLoader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class TestNativeCodeLoader { - static final Log LOG = LogFactory.getLog(TestNativeCodeLoader.class); + static final Logger LOG = LoggerFactory.getLogger(TestNativeCodeLoader.class); private static boolean requireTestJni() { String rtj = System.getProperty("require.test.libhadoop"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java index 73323eaa7a1..a9fa4c64e9b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSignalLogger.java @@ -19,21 +19,22 @@ package org.apache.hadoop.util; import org.apache.commons.lang.SystemUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.junit.Assert; import org.junit.Assume; import org.junit.Test; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; public class TestSignalLogger { - public static final Log LOG = LogFactory.getLog(TestSignalLogger.class); + public static final Logger LOG = + LoggerFactory.getLogger(TestSignalLogger.class); @Test(timeout=60000) public void testInstall() throws Exception { Assume.assumeTrue(SystemUtils.IS_OS_UNIX); - SignalLogger.INSTANCE.register(LOG); + SignalLogger.INSTANCE.register(LogAdapter.create(LOG)); try { - SignalLogger.INSTANCE.register(LOG); + SignalLogger.INSTANCE.register(LogAdapter.create(LOG)); Assert.fail("expected IllegalStateException from double registration"); } catch (IllegalStateException e) { // fall through diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java index cfa97f4a099..3e3bf072116 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java @@ -29,13 +29,13 @@ import java.io.IOException; import org.apache.commons.io.FileUtils; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.hamcrest.CoreMatchers.*; @@ -44,7 +44,7 @@ */ public class TestWinUtils { - private static final Log LOG = LogFactory.getLog(TestWinUtils.class); + private static final Logger LOG = LoggerFactory.getLogger(TestWinUtils.class); private static File TEST_DIR = GenericTestUtils.getTestDir( TestWinUtils.class.getSimpleName()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java index 1ef00856db3..8c13b4f4525 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountdBase.java @@ -19,13 +19,13 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.SimpleTcpServer; import org.apache.hadoop.oncrpc.SimpleUdpServer; import org.apache.hadoop.portmap.PortmapMapping; import org.apache.hadoop.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.util.ExitUtil.terminate; @@ -37,7 +37,7 @@ * handle for requested directory and returns it to the client. */ abstract public class MountdBase { - public static final Log LOG = LogFactory.getLog(MountdBase.class); + public static final Logger LOG = LoggerFactory.getLogger(MountdBase.class); private final RpcProgram rpcProgram; private int udpBoundPort; // Will set after server starts private int tcpBoundPort; // Will set after server starts @@ -63,7 +63,7 @@ private void startUDPServer() { try { udpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the UDP server.", e); + LOG.error("Failed to start the UDP server.", e); if (udpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_UDP, udpServer.getBoundPort()); @@ -82,7 +82,7 @@ private void startTCPServer() { try { tcpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the TCP server.", e); + LOG.error("Failed to start the TCP server.", e); if (tcpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpServer.getBoundPort()); @@ -103,7 +103,7 @@ public void start(boolean register) { rpcProgram.register(PortmapMapping.TRANSPORT_UDP, udpBoundPort); 
rpcProgram.register(PortmapMapping.TRANSPORT_TCP, tcpBoundPort); } catch (Throwable e) { - LOG.fatal("Failed to register the MOUNT service.", e); + LOG.error("Failed to register the MOUNT service.", e); terminate(1, e); } } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java index a299ff08d4b..3d5088d7006 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.regex.Pattern; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.commons.net.util.SubnetUtils; import org.apache.commons.net.util.SubnetUtils.SubnetInfo; import org.apache.hadoop.conf.Configuration; @@ -35,6 +33,8 @@ import org.apache.hadoop.util.StringUtils; import com.google.common.base.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides functionality for loading and checking the mapping @@ -64,7 +64,7 @@ public static synchronized NfsExports getInstance(Configuration conf) { return exports; } - public static final Log LOG = LogFactory.getLog(NfsExports.class); + public static final Logger LOG = LoggerFactory.getLogger(NfsExports.class); // only support IPv4 now private static final String IP_ADDRESS = diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java index bff5eecee70..5b327986f14 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java @@ -23,9 +23,9 @@ import java.security.NoSuchAlgorithmException; 
import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This is a file handle use by the NFS clients. @@ -33,7 +33,7 @@ * on subsequent operations to reference the file. */ public class FileHandle { - private static final Log LOG = LogFactory.getLog(FileHandle.class); + private static final Logger LOG = LoggerFactory.getLogger(FileHandle.class); private static final String HEXES = "0123456789abcdef"; private static final int HANDLE_LEN = 32; private byte[] handle; // Opaque handle diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java index 80faca56f6c..00e6d9f70c2 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3Base.java @@ -17,13 +17,13 @@ */ package org.apache.hadoop.nfs.nfs3; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.SimpleTcpServer; import org.apache.hadoop.portmap.PortmapMapping; import org.apache.hadoop.util.ShutdownHookManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.util.ExitUtil.terminate; @@ -32,7 +32,7 @@ * Only TCP server is supported and UDP is not supported. 
*/ public abstract class Nfs3Base { - public static final Log LOG = LogFactory.getLog(Nfs3Base.class); + public static final Logger LOG = LoggerFactory.getLogger(Nfs3Base.class); private final RpcProgram rpcProgram; private int nfsBoundPort; // Will set after server starts @@ -54,7 +54,7 @@ public void start(boolean register) { try { rpcProgram.register(PortmapMapping.TRANSPORT_TCP, nfsBoundPort); } catch (Throwable e) { - LOG.fatal("Failed to register the NFSv3 service.", e); + LOG.error("Failed to register the NFSv3 service.", e); terminate(1, e); } } @@ -67,7 +67,7 @@ private void startTCPServer() { try { tcpServer.run(); } catch (Throwable e) { - LOG.fatal("Failed to start the TCP server.", e); + LOG.error("Failed to start the TCP server.", e); if (tcpServer.getBoundPort() > 0) { rpcProgram.unregister(PortmapMapping.TRANSPORT_TCP, tcpServer.getBoundPort()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java index 7ba37c98109..c8528ba4d55 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RegistrationClient.java @@ -19,18 +19,19 @@ import java.util.Arrays; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple client that registers an RPC program with portmap. 
*/ public class RegistrationClient extends SimpleTcpClient { - public static final Log LOG = LogFactory.getLog(RegistrationClient.class); + public static final Logger LOG = + LoggerFactory.getLogger(RegistrationClient.class); public RegistrationClient(String host, int port, XDR request) { super(host, port, request); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java index aa4b948d582..0ae3c236172 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java @@ -17,17 +17,17 @@ */ package org.apache.hadoop.oncrpc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.security.Credentials; import org.apache.hadoop.oncrpc.security.Verifier; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents an RPC message of type RPC call as defined in RFC 1831 */ public class RpcCall extends RpcMessage { public static final int RPC_VERSION = 2; - private static final Log LOG = LogFactory.getLog(RpcCall.class); + private static final Logger LOG = LoggerFactory.getLogger(RpcCall.class); public static RpcCall read(XDR xdr) { return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()), @@ -60,7 +60,7 @@ protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion, this.credentials = credential; this.verifier = verifier; if (LOG.isTraceEnabled()) { - LOG.trace(this); + LOG.trace(this.toString()); } validate(); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java index c541cd660b4..5c059aa4550 100644 --- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java @@ -23,8 +23,6 @@ import java.net.SocketAddress; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState; import org.apache.hadoop.oncrpc.security.Verifier; import org.apache.hadoop.oncrpc.security.VerifierNone; @@ -35,13 +33,15 @@ import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Class for writing RPC server programs based on RFC 1050. Extend this class * and implement {@link #handleInternal} to handle the requests received. */ public abstract class RpcProgram extends SimpleChannelUpstreamHandler { - static final Log LOG = LogFactory.getLog(RpcProgram.class); + static final Logger LOG = LoggerFactory.getLogger(RpcProgram.class); public static final int RPCB_PORT = 111; private final String program; private final String host; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java index cbc9943eaa8..cebebd27d0c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcUtil.java @@ -19,8 +19,6 @@ import java.nio.ByteBuffer; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; @@ -29,6 +27,8 @@ import org.jboss.netty.channel.MessageEvent; import 
org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.frame.FrameDecoder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public final class RpcUtil { /** @@ -63,7 +63,8 @@ public static FrameDecoder constructRpcFrameDecoder() { * each RPC client. */ static class RpcFrameDecoder extends FrameDecoder { - public static final Log LOG = LogFactory.getLog(RpcFrameDecoder.class); + public static final Logger LOG = + LoggerFactory.getLogger(RpcFrameDecoder.class); private ChannelBuffer currentFrame; @Override @@ -107,8 +108,8 @@ protected Object decode(ChannelHandlerContext ctx, Channel channel, * request into a RpcInfo instance. */ static final class RpcMessageParserStage extends SimpleChannelUpstreamHandler { - private static final Log LOG = LogFactory - .getLog(RpcMessageParserStage.class); + private static final Logger LOG = LoggerFactory + .getLogger(RpcMessageParserStage.class); @Override public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java index b72153a312c..23b6682361c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpClientHandler.java @@ -17,20 +17,21 @@ */ package org.apache.hadoop.oncrpc; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * A simple 
TCP based RPC client handler used by {@link SimpleTcpServer}. */ public class SimpleTcpClientHandler extends SimpleChannelHandler { - public static final Log LOG = LogFactory.getLog(SimpleTcpClient.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleTcpClient.class); protected final XDR request; public SimpleTcpClientHandler(XDR request) { diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java index bd48b158589..177fa3d80b1 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleTcpServer.java @@ -20,8 +20,6 @@ import java.net.InetSocketAddress; import java.util.concurrent.Executors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; @@ -30,12 +28,15 @@ import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Simple UDP server implemented using netty. 
*/ public class SimpleTcpServer { - public static final Log LOG = LogFactory.getLog(SimpleTcpServer.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleTcpServer.class); protected final int port; protected int boundPort = -1; // Will be set after server starts protected final SimpleChannelUpstreamHandler rpcProgram; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java index d691abad358..e65003ca64b 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/SimpleUdpServer.java @@ -20,20 +20,21 @@ import java.net.InetSocketAddress; import java.util.concurrent.Executors; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.jboss.netty.bootstrap.ConnectionlessBootstrap; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.socket.DatagramChannelFactory; import org.jboss.netty.channel.socket.nio.NioDatagramChannelFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Simple UDP server implemented based on netty. 
*/ public class SimpleUdpServer { - public static final Log LOG = LogFactory.getLog(SimpleUdpServer.class); + public static final Logger LOG = + LoggerFactory.getLogger(SimpleUdpServer.class); private final int SEND_BUFFER_SIZE = 65536; private final int RECEIVE_BUFFER_SIZE = 65536; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java index fe4350be17c..64edf485b29 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java @@ -18,16 +18,16 @@ package org.apache.hadoop.oncrpc.security; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Base class for all credentials. Currently we only support 3 different types * of auth flavors: AUTH_NONE, AUTH_SYS, and RPCSEC_GSS. 
*/ public abstract class Credentials extends RpcAuthInfo { - public static final Log LOG = LogFactory.getLog(Credentials.class); + public static final Logger LOG = LoggerFactory.getLogger(Credentials.class); public static Credentials readFlavorAndCredentials(XDR xdr) { AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java index 93efba89a51..4a674e81de0 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/SecurityHandler.java @@ -19,13 +19,14 @@ import java.io.IOException; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.XDR; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public abstract class SecurityHandler { - public static final Log LOG = LogFactory.getLog(SecurityHandler.class); + public static final Logger LOG = + LoggerFactory.getLogger(SecurityHandler.class); public abstract String getUser(); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java index 7586fdad676..123999d5e14 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/Portmap.java @@ -22,8 +22,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.RpcUtil; import 
org.apache.hadoop.util.StringUtils; @@ -41,12 +39,14 @@ import org.jboss.netty.util.HashedWheelTimer; import com.google.common.annotations.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Portmap service for binding RPC protocols. See RFC 1833 for details. */ final class Portmap { - private static final Log LOG = LogFactory.getLog(Portmap.class); + private static final Logger LOG = LoggerFactory.getLogger(Portmap.class); private static final int DEFAULT_IDLE_TIME_MILLISECONDS = 5000; private ConnectionlessBootstrap udpServer; @@ -65,7 +65,7 @@ public static void main(String[] args) { pm.start(DEFAULT_IDLE_TIME_MILLISECONDS, new InetSocketAddress(port), new InetSocketAddress(port)); } catch (Throwable e) { - LOG.fatal("Failed to start the server. Cause:", e); + LOG.error("Failed to start the server. Cause:", e); pm.shutdown(); System.exit(-1); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java index 67175d0640d..0bc380f614c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java @@ -19,8 +19,6 @@ import java.util.concurrent.ConcurrentHashMap; -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply; import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.RpcInfo; @@ -39,6 +37,8 @@ import org.jboss.netty.handler.timeout.IdleState; import org.jboss.netty.handler.timeout.IdleStateAwareChannelUpstreamHandler; import org.jboss.netty.handler.timeout.IdleStateEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler { static final int PROGRAM = 100000; @@ 
-51,7 +51,8 @@ final class RpcProgramPortmap extends IdleStateAwareChannelUpstreamHandler { static final int PMAPPROC_DUMP = 4; static final int PMAPPROC_GETVERSADDR = 9; - private static final Log LOG = LogFactory.getLog(RpcProgramPortmap.class); + private static final Logger LOG = + LoggerFactory.getLogger(RpcProgramPortmap.class); private final ConcurrentHashMap map = new ConcurrentHashMap();