diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
index 23e7b74469c..5365f6023ae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRequestHedgingProxyProvider.java
@@ -39,7 +39,6 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -57,6 +56,7 @@
 import static org.mockito.Mockito.mock;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
+import org.slf4j.event.Level;
 
 public class TestRequestHedgingProxyProvider {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/util/TestByteArrayManager.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/util/TestByteArrayManager.java
index a8d5cef6b2f..a47ffa77136 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/util/TestByteArrayManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/util/TestByteArrayManager.java
@@ -24,11 +24,11 @@
 import org.apache.hadoop.hdfs.util.ByteArrayManager.ManagerMap;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -50,7 +50,7 @@ public class TestByteArrayManager {
   static {
     GenericTestUtils.setLogLevel(
-        LoggerFactory.getLogger(ByteArrayManager.class), Level.ALL);
+        LoggerFactory.getLogger(ByteArrayManager.class), Level.TRACE);
   }
 
   static final Logger LOG = LoggerFactory.getLogger(TestByteArrayManager.class);
@@ -559,8 +559,8 @@ public synchronized int release(byte[] array) {
   }
 
   public static void main(String[] args) throws Exception {
-    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(ByteArrayManager.class),
-        Level.OFF);
+    GenericTestUtils.disableLog(
+        LoggerFactory.getLogger(ByteArrayManager.class));
     final int arrayLength = 64 * 1024; //64k
     final int nThreads = 512;
     final int nAllocations = 1 << 15;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/test/TestFuseDFS.java b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/test/TestFuseDFS.java
index dabbe00b016..33fe4464e65 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/test/TestFuseDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/test/TestFuseDFS.java
@@ -21,7 +21,7 @@
 import java.util.ArrayList;
 import java.util.concurrent.atomic.*;
 
-import org.apache.log4j.Level;
+import org.slf4j.event.Level;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -50,7 +50,7 @@ public class TestFuseDFS {
   private static final Logger LOG = LoggerFactory.getLogger(TestFuseDFS.class);
   {
-    GenericTestUtils.setLogLevel(LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.TRACE);
   }
 
   /** Dump the given intput stream to stderr */
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterNamenodeMonitoring.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterNamenodeMonitoring.java
index d2b337c1b7a..4fae86b01d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterNamenodeMonitoring.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterNamenodeMonitoring.java
@@ -54,14 +54,15 @@
 import org.apache.hadoop.hdfs.server.federation.resolver.NamenodeStatusReport;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * Test namenodes monitor behavior in the Router.
@@ -300,7 +301,7 @@ private void verifyUrlSchemes(String scheme) {
     final org.apache.log4j.Logger logger =
         org.apache.log4j.Logger.getRootLogger();
     logger.addAppender(appender);
-    logger.setLevel(Level.DEBUG);
+    GenericTestUtils.setRootLogLevel(Level.DEBUG);
 
     // Setup and start the Router
     Configuration conf = getNamenodesConfig();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java
index 042431e2a6c..fd81a1e23fb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java
@@ -38,10 +38,10 @@
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Test symbolic links in Hdfs.
@@ -49,7 +49,7 @@ abstract public class TestSymlinkHdfs extends SymlinkBaseTest {
 
   {
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
+    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.TRACE);
   }
 
   protected static MiniDFSCluster cluster;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index 45a521fe961..7aa89596c11 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -190,12 +190,12 @@
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.VersionInfo;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.apache.hadoop.util.ToolRunner;
 
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.slf4j.event.Level;
 
 /** Utilities for HDFS tests */
 public class DFSTestUtil {
@@ -1992,15 +1992,6 @@ public static void setNameNodeLogLevel(Level level) {
     GenericTestUtils.setLogLevel(NameNode.blockStateChangeLog, level);
   }
 
-  public static void setNameNodeLogLevel(org.slf4j.event.Level level) {
-    GenericTestUtils.setLogLevel(FSNamesystem.LOG, level);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, level);
-    GenericTestUtils.setLogLevel(LeaseManager.LOG, level);
-    GenericTestUtils.setLogLevel(NameNode.LOG, level);
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, level);
-    GenericTestUtils.setLogLevel(NameNode.blockStateChangeLog, level);
-  }
-
   /**
    * Get the NamenodeProtocol RPC proxy for the NN associated with this
    * DFSClient object
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java
index 0ef3b75adce..b4e9550e118 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java
@@ -45,13 +45,13 @@
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.TestFileTruncate;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.slf4j.event.Level;
 
 /**
  * Test randomly mixing append, snapshot and truncate operations.
@@ -60,7 +60,7 @@
  */
 public class TestAppendSnapshotTruncate {
   static {
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
+    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.TRACE);
   }
   private static final Logger LOG = LoggerFactory.getLogger(TestAppendSnapshotTruncate.class);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
index e377a539e3c..30115efc384 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
@@ -90,7 +90,6 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -98,6 +97,7 @@
 import org.mockito.internal.stubbing.answers.ThrowsException;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+import org.slf4j.event.Level;
 
 /**
@@ -959,7 +959,7 @@ public void testNamenodeRestart() throws Exception {
 
   public static void namenodeRestartTest(final Configuration conf,
       final boolean isWebHDFS) throws Exception {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
 
     final List exceptions = new ArrayList();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientSocketSize.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientSocketSize.java
index 40cd676f3ec..1e6f03a1d01 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientSocketSize.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientSocketSize.java
@@ -20,12 +20,12 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.net.Socket;
@@ -37,7 +37,7 @@ public class TestDFSClientSocketSize {
   private static final Logger LOG = LoggerFactory.getLogger(
       TestDFSClientSocketSize.class);
   static {
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index cc4dd92d5c4..72ac47c6796 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -40,7 +40,6 @@
 import org.apache.commons.lang3.RandomStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
@@ -72,6 +71,7 @@
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Assert;
+import org.slf4j.event.Level;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
@@ -1962,7 +1962,7 @@ public Object run() throws Exception {
 
   @Test (timeout = 30000)
   public void testGet() throws IOException {
-    GenericTestUtils.setLogLevel(FSInputChecker.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSInputChecker.LOG, Level.TRACE);
 
     final String fname = "testGet.txt";
     Path root = new Path("/test/get");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedOutputStream.java
index 79112af12c3..9044a6d0cb0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedOutputStream.java
@@ -47,20 +47,20 @@
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
+import org.slf4j.event.Level;
 
 public class TestDFSStripedOutputStream {
   public static final Logger LOG = LoggerFactory.getLogger(
       TestDFSStripedOutputStream.class);
 
   static {
-    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   private ErasureCodingPolicy ecPolicy;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java
index a1aae925947..c5141f34ee9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java
@@ -34,19 +34,18 @@
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests that pipelines survive data node death and recovery.
 */
 public class TestDatanodeDeath {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, org.slf4j
-        .event.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, Level.TRACE);
   }
 
   static final int blockSize = 8192;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java
index cb4cefb020c..18209a4d179 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDecommission.java
@@ -73,13 +73,13 @@
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.eclipse.jetty.util.ajax.JSON;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the decommissioning of nodes.
@@ -1225,8 +1225,8 @@ public Boolean get() {
 
   @Test(timeout=120000)
   public void testBlocksPerInterval() throws Exception {
-    org.apache.log4j.Logger.getLogger(DatanodeAdminManager.class)
-        .setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.TRACE);
     // Turn the blocks per interval way down
     getConf().setInt(
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY,
@@ -1327,8 +1327,8 @@ public void testPendingNodeButDecommissioned() throws Exception {
 
   @Test(timeout=120000)
   public void testPendingNodes() throws Exception {
-    org.apache.log4j.Logger.getLogger(DatanodeAdminManager.class)
-        .setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.TRACE);
     // Only allow one node to be decom'd at a time
     getConf().setInt(
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptedTransfer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptedTransfer.java
index 73df025013d..f9336fcfdc7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptedTransfer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptedTransfer.java
@@ -53,8 +53,6 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -65,12 +63,15 @@
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 @RunWith(Parameterized.class)
 public class TestEncryptedTransfer {
   {
-    LogManager.getLogger(SaslDataTransferServer.class).setLevel(Level.DEBUG);
-    LogManager.getLogger(DataTransferSaslUtil.class).setLevel(Level.DEBUG);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(SaslDataTransferServer.class), Level.DEBUG);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DataTransferSaslUtil.class), Level.DEBUG);
   }
 
   @Rule
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index 2bd2324491b..030f56aad8a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -43,6 +43,7 @@
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 
 import org.apache.hadoop.conf.Configuration;
@@ -103,8 +104,6 @@
 import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -146,6 +145,9 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 import org.xml.sax.InputSource;
 import org.xml.sax.helpers.DefaultHandler;
@@ -153,7 +155,7 @@
 import javax.xml.parsers.SAXParserFactory;
 
 public class TestEncryptionZones {
-  static final Logger LOG = Logger.getLogger(TestEncryptionZones.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestEncryptionZones.class);
 
   protected Configuration conf;
   private FileSystemTestHelper fsHelper;
@@ -197,7 +199,8 @@ public void setup() throws Exception {
         2);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
     cluster.waitActive();
-    Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(fs);
     fcWrapper = new FileContextTestWrapper(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend2.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend2.java
index 28e51b80f4e..9929cb24b23 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend2.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend2.java
@@ -42,8 +42,8 @@
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the building blocks that are needed to
@@ -52,9 +52,9 @@
 public class TestFileAppend2 {
 
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   static final int numBlocks = 5;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
index 6cb63a24306..3e9adcac7e7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
@@ -46,20 +46,19 @@
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /** This class implements some of tests posted in HADOOP-2658. */
 public class TestFileAppend3 {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, org.slf4j
-        .event.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, Level.TRACE);
   }
 
   static final long BLOCK_SIZE = 64 * 1024;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java
index 63e7eb60511..8c672b585df 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java
@@ -47,9 +47,9 @@
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /* File Append tests for HDFS-200 & HDFS-142, specifically focused on:
  * using append()/sync() to recover block information
@@ -67,9 +67,9 @@ public class TestFileAppend4 {
   FSDataOutputStream stm;
 
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   @Before
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
index 7557197669d..0c7a3fcaae2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
@@ -38,11 +38,12 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
@@ -59,7 +60,7 @@ private enum SyncType {
 
   private static final Logger LOG =
-      Logger.getLogger(TestFileConcurrentReader.class);
+      LoggerFactory.getLogger(TestFileConcurrentReader.class);
 
   {
     GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.TRACE);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
index 3665fef3ff8..381cf1694f5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCorruption.java
@@ -55,18 +55,18 @@
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.slf4j.Logger;
+import org.slf4j.event.Level;
 
 /**
  * A JUnit test for corrupted file handling.
 */
 public class TestFileCorruption {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   static Logger LOG = NameNode.stateChangeLog;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationDelete.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationDelete.java
index 58e11949579..728fa7557e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationDelete.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationDelete.java
@@ -24,12 +24,12 @@
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestFileCreationDelete {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   @Test
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHFlush.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHFlush.java
index ce49f698210..d700765bd03 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHFlush.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHFlush.java
@@ -35,15 +35,15 @@
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /** Class contains a set of tests to verify the correctness of
  * newly introduced {@link FSDataOutputStream#hflush()} method */
 public class TestHFlush {
   {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
  }
 
   private final String fName = "hflushtest.dat";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLeaseRecoveryStriped.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLeaseRecoveryStriped.java
index d0eccc14a42..5aa1f638370 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLeaseRecoveryStriped.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLeaseRecoveryStriped.java
@@ -35,11 +35,11 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -71,7 +71,7 @@ public class TestLeaseRecoveryStriped {
   private static final int bytesPerChecksum = 512;
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
     GenericTestUtils.setLogLevel(DFSStripedOutputStream.LOG, Level.DEBUG);
     GenericTestUtils.setLogLevel(BlockRecoveryWorker.LOG, Level.DEBUG);
     GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.DEBUG);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestParallelReadUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestParallelReadUtil.java
index a3f4dbc7b85..857ab7bdefa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestParallelReadUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestParallelReadUtil.java
@@ -25,16 +25,16 @@
 import java.nio.ByteBuffer;
 import java.util.Random;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.client.impl.BlockReaderTestUtil;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Driver class for testing the use of DFSInputStream by multiple concurrent
@@ -57,8 +57,9 @@ public class TestParallelReadUtil {
 
   static {
     // The client-trace log ends up causing a lot of blocking threads
     // in this when it's being used as a performance benchmark.
-    LogManager.getLogger(DataNode.class.getName() + ".clienttrace")
-        .setLevel(Level.WARN);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DataNode.class.getName() + ".clienttrace"),
+        Level.WARN);
   }
 
   private class TestFileInfo {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
index 7125b0e7c84..4cead9c48a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPipelines.java
@@ -35,10 +35,10 @@
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestPipelines {
   public static final Logger LOG = LoggerFactory.getLogger(TestPipelines.class);
@@ -158,8 +158,8 @@ private static void setConfiguration() {
   }
 
   private static void initLoggers() {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java
index ea332c85d23..ac0994d8893 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java
@@ -50,7 +50,6 @@
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -61,6 +60,7 @@
 import java.util.function.Supplier;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the DFS positional read functionality in a single node
@@ -278,7 +278,7 @@ public void testPreadDFS() throws IOException {
 
   @Test
   public void testPreadDFSNoChecksum() throws IOException {
     Configuration conf = new Configuration();
-    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.TRACE);
     dfsPreadTest(conf, false, false);
     dfsPreadTest(conf, true, false);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
index c4c263a23ec..902b8992773 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
@@ -66,11 +66,11 @@
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestReconstructStripedFile {
   public static final Logger LOG =
@@ -85,9 +85,9 @@ public class TestReconstructStripedFile {
   private int dnNum;
 
   static {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.TRACE);
   }
 
   enum ReconstructionType {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRenameWhileOpen.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRenameWhileOpen.java
index 827577d1f8c..9b29fe80c44 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRenameWhileOpen.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRenameWhileOpen.java
@@ -27,13 +27,13 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLog;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 public class TestRenameWhileOpen {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   //TODO: un-comment checkFullFile once the lease recovery is done
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeOnFailure.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeOnFailure.java
index a63eb421eed..925f93648d4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeOnFailure.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReplaceDatanodeOnFailure.java
@@ -37,9 +37,9 @@
 import org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure.Policy;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests that data nodes are correctly replaced on failure.
@@ -54,7 +54,7 @@ public class TestReplaceDatanodeOnFailure {
   final private static String RACK1 = "/rack1";
 
   {
-    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.TRACE);
   }
 
   /** Test DEFAULT ReplaceDatanodeOnFailure policy. */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReservedRawPaths.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReservedRawPaths.java
index 12b86cbdbb8..0eb06056db9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReservedRawPaths.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReservedRawPaths.java
@@ -41,11 +41,12 @@
 import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesEqual;
 import static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesNotEqual;
@@ -83,7 +84,8 @@ public void setup() throws Exception {
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri()
     );
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(cluster.getFileSystem());
     fcWrapper = new FileContextTestWrapper(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStripedFileAppend.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStripedFileAppend.java
index 29ac3943637..a00f67ac3b5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStripedFileAppend.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestStripedFileAppend.java
@@ -26,10 +26,10 @@
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator.OpenFilesType;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -50,7 +50,7 @@ public class TestStripedFileAppend {
   public static final Log LOG = LogFactory.getLog(TestStripedFileAppend.class);
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   private static final int NUM_DATA_BLOCKS =
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestTrashWithEncryptionZones.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestTrashWithEncryptionZones.java
index 2a8d493929e..7699e3e23ff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestTrashWithEncryptionZones.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestTrashWithEncryptionZones.java
@@ -34,11 +34,13 @@
 import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import org.apache.hadoop.hdfs.server.namenode.EncryptionZoneManager;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.File;
 import java.security.PrivilegedExceptionAction;
@@ -91,8 +93,8 @@ public void setup() throws Exception {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
         2);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    org.apache.log4j.Logger
-        .getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(fs);
     dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
index 76893615f99..566f91b32f4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
@@ -25,10 +25,10 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -41,8 +41,8 @@ public class TestWriteStripedFileWithFailure {
   private Configuration conf = new HdfsConfiguration();
 
   static {
-    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   private final ErasureCodingPolicy ecPolicy =
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java
index e2f55e53f36..71f71208d3c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/BlockReaderTestUtil.java
@@ -59,8 +59,9 @@
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * A helper class to setup the cluster, and get to BlockReader and DataNode for a block.
@@ -238,33 +239,27 @@ public DataNode getDataNode(LocatedBlock testBlock) {
   }
 
   public static void enableHdfsCachingTracing() {
-    LogManager.getLogger(CacheReplicationMonitor.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(CacheManager.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(FsDatasetCache.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(CacheReplicationMonitor.class);
+    enableTraceLog(CacheManager.class);
+    enableTraceLog(FsDatasetCache.class);
   }
 
   public static void enableBlockReaderFactoryTracing() {
-    LogManager.getLogger(BlockReaderFactory.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitCache.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitReplica.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(BlockReaderLocal.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(BlockReaderFactory.class);
+    enableTraceLog(ShortCircuitCache.class);
+    enableTraceLog(ShortCircuitReplica.class);
+    enableTraceLog(BlockReaderLocal.class);
   }
 
   public static void enableShortCircuitShmTracing() {
-    LogManager.getLogger(DfsClientShmManager.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitRegistry.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitShm.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(DataNode.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(DfsClientShmManager.class);
+    enableTraceLog(ShortCircuitRegistry.class);
+    enableTraceLog(ShortCircuitShm.class);
+    enableTraceLog(DataNode.class);
+  }
+
+  private static void enableTraceLog(Class clazz) {
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(clazz), Level.TRACE);
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/TestClientBlockVerification.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/TestClientBlockVerification.java
index fc2de94a7f3..54156f666d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/TestClientBlockVerification.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/client/impl/TestClientBlockVerification.java
@@ -29,10 +29,10 @@
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestClientBlockVerification {
@@ -42,7 +42,7 @@ public class TestClientBlockVerification {
   static LocatedBlock testBlock = null;
 
   static {
-    GenericTestUtils.setLogLevel(BlockReaderRemote.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockReaderRemote.LOG, Level.TRACE);
   }
 
   @BeforeClass
   public static void setupCluster() throws Exception {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
index 8ead48f6f8a..ceca1ead10d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java
@@ -53,7 +53,6 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -64,6 +63,7 @@
 import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
+import org.slf4j.event.Level;
 
 public class TestQJMWithFaults {
@@ -225,7 +225,7 @@ public void testRandomized() throws Exception {
       // If the user specifies a seed, then we should gather all the
       // IPC trace information so that debugging is easier. This makes
      // the test run about 25% slower otherwise.
-      GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL);
+      GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.TRACE);
     } else {
       seed = new Random().nextLong();
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
index 251fadf20e1..fb1cdb5de0b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java
@@ -60,7 +60,6 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -70,6 +69,7 @@
 import org.mockito.stubbing.Stubber;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
+import org.slf4j.event.Level;
 
 /**
  * Functional tests for QuorumJournalManager.
@@ -87,7 +87,7 @@ public class TestQuorumJournalManager {
   private final List toClose = Lists.newLinkedList();
 
   static {
-    GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.TRACE);
   }
 
   @Rule
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java
index fe833ecc522..524656aa763 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java
@@ -87,7 +87,6 @@
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
@@ -99,6 +98,7 @@
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 import org.apache.hadoop.fs.StorageType;
+import org.slf4j.event.Level;
 
 /** Unit tests for block tokens */
 public class TestBlockToken {
@@ -107,11 +107,11 @@ public class TestBlockToken {
   private static final String ADDRESS = "0.0.0.0";
 
   static {
-    GenericTestUtils.setLogLevel(Client.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(Server.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java
index 40afebff499..e364ae62ca5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancerWithMultipleNameNodes.java
@@ -61,7 +61,7 @@ public class TestBalancerWithMultipleNameNodes {
   static final Logger LOG = Balancer.LOG;
   {
     GenericTestUtils.setLogLevel(LOG, Level.TRACE);
-    DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
index c2a5a097ac3..ec86093ad54 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
@@ -35,13 +35,13 @@
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
+import org.slf4j.event.Level;
 
 abstract public class BaseReplicationPolicyTest {
   {
-    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
   }
 
   protected NetworkTopology cluster;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockReportRateLimiting.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockReportRateLimiting.java
index 16279bb0883..9f8b2c7465f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockReportRateLimiting.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockReportRateLimiting.java
@@ -31,11 +31,11 @@
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.protocol.BlockReportContext;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.HashSet;
@@ -60,8 +60,8 @@ public void restoreNormalBlockManagerFaultInjector() {
 
   @BeforeClass
   public static void raiseBlockManagerLogLevels() {
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockReportLeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockReportLeaseManager.LOG, Level.TRACE);
   }
 
   @Test(timeout=180000)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockTokenWithDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockTokenWithDFS.java
index b57c4f3a40d..428e252839c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockTokenWithDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockTokenWithDFS.java
@@ -64,9 +64,9 @@
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestBlockTokenWithDFS {
@@ -77,7 +77,7 @@ public class TestBlockTokenWithDFS {
   private static final String FILE_TO_APPEND = "/fileToAppend.dat";
 
   {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   public static byte[] generateBytes(int fileSize){
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestNameNodePrunesMissingStorages.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestNameNodePrunesMissingStorages.java
index a73fd69f64d..dea893bab3b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestNameNodePrunesMissingStorages.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestNameNodePrunesMissingStorages.java
@@ -45,9 +45,9 @@
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -319,7 +319,7 @@ public void testRenamingStorageIds() throws Exception {
         .Builder(conf).numDataNodes(1)
         .storagesPerDatanode(1)
         .build();
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
     try {
       cluster.waitActive();
       final Path TEST_PATH = new Path("/foo1");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingInvalidateBlock.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingInvalidateBlock.java
index c298761719e..34e7390775a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingInvalidateBlock.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingInvalidateBlock.java
@@ -32,12 +32,12 @@
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 import java.util.function.Supplier;
 
@@ -47,7 +47,7 @@
 */
 public class TestPendingInvalidateBlock {
   {
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.DEBUG);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
   }
 
   private static final int BLOCKSIZE = 1024;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingReconstruction.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingReconstruction.java
index 41303f101dd..ea7347f9e50 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingReconstruction.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestPendingReconstruction.java
@@ -59,10 +59,10 @@
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the internals of PendingReconstructionBlocks.java, as well
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReconstructStripedBlocksWithRackAwareness.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReconstructStripedBlocksWithRackAwareness.java
index 6bfc0b0deff..43f3243b5f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReconstructStripedBlocksWithRackAwareness.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReconstructStripedBlocksWithRackAwareness.java
@@ -35,13 +35,13 @@
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -53,9 +53,9 @@ public class TestReconstructStripedBlocksWithRackAwareness {
       TestReconstructStripedBlocksWithRackAwareness.class);
 
   static {
-    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
   }
 
   private final ErasureCodingPolicy ecPolicy =
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java
index f500ae0fe49..1a65d2f024e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/BlockReportTestBase.java
@@ -69,13 +69,13 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
+import org.slf4j.event.Level;
 
 /**
  * This is the base class for simulating a variety of situations
@@ -877,9 +877,9 @@ public boolean accept(File file, String s) {
   }
 
   private static void initLoggers() {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockReportTestBase.LOG, org.slf4j.event.Level.DEBUG);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockReportTestBase.LOG, Level.DEBUG);
   }
 
   private Block findBlock(Path path, long size) throws IOException {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
index bf919e16fcf..a3627116133 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBPOfferService.java
@@ -86,7 +86,6 @@
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -97,6 +96,7 @@
 import java.util.function.Supplier;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
+import org.slf4j.event.Level;
 
 public class TestBPOfferService {
@@ -114,7 +114,7 @@ public class TestBPOfferService {
   private long nextFullBlockReportLeaseId = 1L;
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   private DatanodeProtocolClientSideTranslatorPB mockNN1;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockScanner.java
index b34b7df0a92..fdbcb51ae45 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockScanner.java
@@ -62,12 +62,12 @@
 import org.apache.hadoop.hdfs.server.datanode.VolumeScanner.Statistics;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 public class TestBlockScanner {
   public static final Logger LOG =
@@ -76,9 +76,9 @@ public class TestBlockScanner {
   @Before
   public void before() {
     BlockScanner.Conf.allowUnitTestSettings = true;
-    GenericTestUtils.setLogLevel(BlockScanner.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(VolumeScanner.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(FsVolumeImpl.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockScanner.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(VolumeScanner.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(FsVolumeImpl.LOG, Level.TRACE);
   }
 
   private static void disableBlockScanner(Configuration conf) {
@@ -899,7 +899,7 @@ public Boolean get() {
    */
   @Test(timeout=120000)
   public void testAppendWhileScanning() throws Exception {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
     Configuration conf = new Configuration();
     // throttle the block scanner: 1MB per second
     conf.setLong(DFS_BLOCK_SCANNER_VOLUME_BYTES_PER_SECOND, 1048576);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeLifeline.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeLifeline.java
index 283b8bd6d25..44f90690e34 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeLifeline.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeLifeline.java
@@ -57,8 +57,6 @@
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import org.apache.log4j.Level;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -70,6 +68,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.function.Supplier;
 
@@ -82,7 +81,7 @@ public class TestDataNodeLifeline {
       TestDataNodeLifeline.class);
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   @Rule
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
index d5e3931dd51..12b251fbb43 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java @@ -75,7 +75,6 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.AutoCloseableLock; import org.apache.hadoop.util.Time; -import org.apache.log4j.Level; import org.apache.log4j.SimpleLayout; import org.apache.log4j.WriterAppender; import org.junit.Before; @@ -83,6 +82,7 @@ import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; /** * Tests {@link DirectoryScanner} handling of differences between blocks on the @@ -410,7 +410,7 @@ public void testScanDirectoryStructureWarn() throws Exception { ByteArrayOutputStream loggerStream = new ByteArrayOutputStream(); org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger(); - rootLogger.setLevel(Level.INFO); + GenericTestUtils.setRootLogLevel(Level.INFO); WriterAppender writerAppender = new WriterAppender(new SimpleLayout(), loggerStream); rootLogger.addAppender(writerAppender); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java index 99dc783c86a..21e264af198 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java @@ -36,11 +36,11 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage; import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport; -import org.apache.log4j.Level; import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; +import org.slf4j.event.Level; /** * Tests that very large block reports can pass through the RPC server and diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestTransferRbw.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestTransferRbw.java index 7859657efbc..8653f4b208f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestTransferRbw.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestTransferRbw.java @@ -38,9 +38,9 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Test; +import org.slf4j.event.Level; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY; @@ -50,7 +50,7 @@ public class TestTransferRbw { LoggerFactory.getLogger(TestTransferRbw.class); { - GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL); + GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE); } private static final Random RAN = new Random(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java index cf35ba99fd0..c4280bc97b4 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImplTestUtils.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.DataChecksum; -import org.apache.log4j.Level; +import org.slf4j.event.Level; import java.io.DataOutputStream; import java.io.File; @@ -500,7 +500,6 @@ public void verifyBlockPoolMissing(String bpid) throws IOException { * @param level the level to set */ public static void setFsDatasetImplLogLevel(Level level) { - GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, - org.slf4j.event.Level.valueOf(level.toString())); + GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, level); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java index 9d5bfd7b2e8..5bb67882a8e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestSpaceReservation.java @@ -45,13 +45,13 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Daemon; -import org.apache.log4j.Level; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.Mockito; +import org.slf4j.event.Level; import java.io.IOException; import java.io.OutputStream; @@ -101,8 +101,8 @@ private void initConfig(int blockSize) { } static { - GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.ALL); - GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL); + GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.TRACE); + GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE); } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestDataNodeOutlierDetectionViaMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestDataNodeOutlierDetectionViaMetrics.java index 3eb5df4e3ab..1faddb362ec 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestDataNodeOutlierDetectionViaMetrics.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestDataNodeOutlierDetectionViaMetrics.java @@ -23,13 +23,13 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.metrics2.lib.MetricsTestHelper; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; import java.util.Map; import java.util.Random; @@ -67,8 +67,8 @@ public class TestDataNodeOutlierDetectionViaMetrics { @Before public void setup() { - GenericTestUtils.setLogLevel(DataNodePeerMetrics.LOG, Level.ALL); - GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.ALL); + 
GenericTestUtils.setLogLevel(DataNodePeerMetrics.LOG, Level.TRACE); + GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.TRACE); conf = new HdfsConfiguration(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestSlowNodeDetector.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestSlowNodeDetector.java index bd0cf3e3634..8a771e42e4f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestSlowNodeDetector.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/metrics/TestSlowNodeDetector.java @@ -23,13 +23,13 @@ import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; import java.util.ArrayList; import java.util.Collections; @@ -239,7 +239,7 @@ public class TestSlowNodeDetector { public void setup() { slowNodeDetector = new OutlierDetector(MIN_OUTLIER_DETECTION_PEERS, (long) LOW_THRESHOLD); - GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.ALL); + GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.TRACE); } @Test diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java index 513c6094c57..542f14964f8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java @@ -85,8 +85,7 @@ import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.VersionInfo; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; +import org.slf4j.event.Level; /** * Main class for a series of name-node benchmarks. 
@@ -150,9 +149,9 @@ static void setNameNodeLoggingLevel(Level logLevel) { LOG.info("Log level = " + logLevel.toString()); // change log level to NameNode logs DFSTestUtil.setNameNodeLogLevel(logLevel); - GenericTestUtils.setLogLevel(LogManager.getLogger( + GenericTestUtils.setLogLevel(LoggerFactory.getLogger( NetworkTopology.class.getName()), logLevel); - GenericTestUtils.setLogLevel(LogManager.getLogger( + GenericTestUtils.setLogLevel(LoggerFactory.getLogger( Groups.class.getName()), logLevel); } @@ -353,7 +352,7 @@ protected boolean verifyOpArgument(List args) { if(llIndex >= 0) { if(args.size() <= llIndex + 1) printUsage(); - logLevel = Level.toLevel(args.get(llIndex+1), Level.ERROR); + logLevel = Level.valueOf(args.get(llIndex+1)); args.remove(llIndex+1); args.remove(llIndex); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogAtDebug.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogAtDebug.java index 3eb311450e7..c86b04cb773 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogAtDebug.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogAtDebug.java @@ -26,10 +26,10 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.FSNamesystemAuditLogger; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; +import org.slf4j.event.Level; import java.net.Inet4Address; import java.util.Arrays; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java index d02232feab2..ee98160d8ea 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java @@ -39,7 +39,6 @@ import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils.LogCapturer; -import org.apache.log4j.Level; import org.junit.Before; import org.junit.Test; @@ -48,6 +47,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; import java.io.IOException; import java.net.HttpURLConnection; @@ -85,7 +85,7 @@ public class TestAuditLogger { private static final Logger LOG = LoggerFactory.getLogger( TestAuditLogger.class); static { - GenericTestUtils.setLogLevel(LOG, Level.ALL); + GenericTestUtils.setLogLevel(LOG, Level.TRACE); } private static final short TEST_PERMISSION = (short) 0654; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java index 103391917cc..9fe7404eed8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.security.AccessControlException; import 
org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.PathUtils; import org.apache.log4j.Appender; import org.apache.log4j.AsyncAppender; @@ -61,6 +62,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; +import org.slf4j.LoggerFactory; /** * A JUnit test that audit logs are generated @@ -297,11 +299,11 @@ private void configureAuditLogs() throws IOException { if (file.exists()) { assertTrue(file.delete()); } - Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); // disable logging while the cluster startup preps files - logger.setLevel(Level.OFF); + disableAuditLog(); PatternLayout layout = new PatternLayout("%m%n"); RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile); + Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); logger.addAppender(appender); } @@ -314,10 +316,10 @@ private void verifyAuditLogs(boolean expectSuccess) throws IOException { private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe) throws IOException { // Turn off the logs - Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); - logger.setLevel(Level.OFF); + disableAuditLog(); // Close the appenders and force all logs to be flushed + Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); Enumeration appenders = logger.getAllAppenders(); while (appenders.hasMoreElements()) { Appender appender = (Appender)appenders.nextElement(); @@ -347,10 +349,10 @@ private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe) private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Pattern pattern) throws IOException { // Turn off the logs - Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); - logger.setLevel(Level.OFF); + disableAuditLog(); // Close the appenders and force all logs to be flushed + Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); Enumeration appenders = logger.getAllAppenders(); while (appenders.hasMoreElements()) { Appender appender = (Appender)appenders.nextElement(); @@ -376,4 +378,10 @@ private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Patte reader.close(); } } + + private void disableAuditLog() { + GenericTestUtils.disableLog(LoggerFactory.getLogger( + FSNamesystem.class.getName() + ".audit")); + } + } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java index 800f273edb5..68ace04788e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java @@ -57,11 +57,12 @@ import org.apache.hadoop.hdfs.tools.DFSAdmin; import org.apache.hadoop.hdfs.util.HostsFileWriter; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; /** * This class tests the decommissioning of nodes. 
@@ -107,8 +108,9 @@ protected Configuration setupConfig() throws Exception { conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1); conf.setInt(DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY, 1); conf.setLong(DFSConfigKeys.DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY, 1); - Logger.getLogger(DatanodeAdminManager.class).setLevel(Level.DEBUG); - LOG = Logger.getLogger(TestDecommissioningStatus.class); + GenericTestUtils.setLogLevel( + LoggerFactory.getLogger(DatanodeAdminManager.class), Level.DEBUG); + LOG = LoggerFactory.getLogger(TestDecommissioningStatus.class); return conf; } @@ -388,8 +390,8 @@ public void testDecommissionStatusAfterDNRestart() throws Exception { */ @Test(timeout=120000) public void testDecommissionDeadDN() throws Exception { - Logger log = Logger.getLogger(DatanodeAdminManager.class); - log.setLevel(Level.DEBUG); + Logger log = LoggerFactory.getLogger(DatanodeAdminManager.class); + GenericTestUtils.setLogLevel(log, Level.DEBUG); DatanodeID dnID = cluster.getDataNodes().get(0).getDatanodeId(); String dnName = dnID.getXferAddr(); DataNodeProperties stoppedDN = cluster.stopDataNode(0); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java index 0592b97b78c..dbf65fadb10 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLog.java @@ -91,7 +91,6 @@ import org.apache.hadoop.util.ExitUtil.ExitException; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; -import org.apache.log4j.Level; import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.LogManager; import org.apache.log4j.spi.LoggingEvent; @@ -100,6 +99,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.mockito.Mockito; +import org.slf4j.event.Level; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; @@ -113,7 +113,7 @@ public class TestEditLog { static { - GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL); + GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.TRACE); } @Parameters diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsImageValidation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsImageValidation.java index 09f686ea59b..af30f1acde4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsImageValidation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsImageValidation.java @@ -20,11 +20,12 @@ import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.HAUtil; -import org.apache.log4j.Level; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; public class TestFsImageValidation { static final Logger LOG = LoggerFactory.getLogger( @@ -32,9 +33,11 @@ public class TestFsImageValidation { static { final Level t = Level.TRACE; - FsImageValidation.Util.setLogLevel(FsImageValidation.class, t); - FsImageValidation.Util.setLogLevel(INodeReferenceValidation.class, t); 
- FsImageValidation.Util.setLogLevel(INode.class, t); + GenericTestUtils.setLogLevel( + LoggerFactory.getLogger(FsImageValidation.class), t); + GenericTestUtils.setLogLevel( + LoggerFactory.getLogger(INodeReferenceValidation.class), t); + GenericTestUtils.setLogLevel(LoggerFactory.getLogger(INode.class), t); } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java index d89bb6b4332..ca5a87033e5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java @@ -264,8 +264,8 @@ private void setupAuditLogs() throws IOException { private void verifyAuditLogs() throws IOException { // Turn off the logs - Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); - logger.setLevel(Level.OFF); + GenericTestUtils.disableLog(LoggerFactory.getLogger( + FSNamesystem.class.getName() + ".audit")); BufferedReader reader = null; try { @@ -292,6 +292,7 @@ private void verifyAuditLogs() throws IOException { if (reader != null) { reader.close(); } + Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); if (logger != null) { logger.removeAllAppenders(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsckWithMultipleNameNodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsckWithMultipleNameNodes.java index e414296796d..327c51ca765 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsckWithMultipleNameNodes.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsckWithMultipleNameNodes.java @@ -36,9 +36,9 @@ import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.server.balancer.TestBalancer; -import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Test; +import org.slf4j.event.Level; /** * Test fsck with multiple NameNodes @@ -47,7 +47,7 @@ public class TestFsckWithMultipleNameNodes { static final Logger LOG = LoggerFactory.getLogger(TestFsckWithMultipleNameNodes.class); { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNestedEncryptionZones.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNestedEncryptionZones.java index 92187d11283..4a9fa71d463 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNestedEncryptionZones.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNestedEncryptionZones.java @@ -32,12 +32,13 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ToolRunner; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Test; +import org.slf4j.LoggerFactory; +import org.slf4j.event.Level; import java.io.File; @@ -103,7 +104,8 @@ 
public void setup() throws Exception { // enable trash for testing conf.setLong(DFSConfigKeys.FS_TRASH_INTERVAL_KEY, 1); cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build(); - Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE); + GenericTestUtils.setLogLevel( + LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE); fs = cluster.getFileSystem(); setProvider(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencing.java index ebd556e0c21..0f2121663b4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencing.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencing.java @@ -58,12 +58,12 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils.DelayAnswer; -import org.apache.log4j.Level; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; +import org.slf4j.event.Level; public class TestDNFencing { @@ -79,7 +79,7 @@ public class TestDNFencing { private FileSystem fs; static { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } @Before diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencingWithReplication.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencingWithReplication.java index cf2674682d2..3f86d4521e4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencingWithReplication.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDNFencingWithReplication.java @@ -32,9 +32,9 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread; import org.apache.hadoop.test.MultithreadedTestUtil.TestContext; -import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Test; +import org.slf4j.event.Level; import java.util.function.Supplier; @@ -46,8 +46,8 @@ public class TestDNFencingWithReplication { static { GenericTestUtils.setLogLevel(FSNamesystem.auditLog, Level.WARN); - GenericTestUtils.setLogLevel(Server.LOG, Level.FATAL); - GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.FATAL); + GenericTestUtils.setLogLevel(Server.LOG, Level.ERROR); + GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.ERROR); } private static final int NUM_THREADS = 20; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHASafeMode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHASafeMode.java index 176b981a6a5..e17bb6f53cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHASafeMode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHASafeMode.java @@ -88,7 +88,7 @@ public class TestHASafeMode { private MiniDFSCluster cluster; static { - DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); 
GenericTestUtils.setLogLevel(FSImage.LOG, Level.TRACE); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java index 83dbeeb3a2c..292f7abe6c2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestPipelinesFailover.java @@ -58,9 +58,9 @@ import org.apache.hadoop.test.MultithreadedTestUtil.TestContext; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Shell.ShellCommandExecutor; -import org.apache.log4j.Level; import org.junit.Test; import org.mockito.Mockito; +import org.slf4j.event.Level; import java.util.function.Supplier; @@ -69,9 +69,9 @@ */ public class TestPipelinesFailover { static { - GenericTestUtils.setLogLevel(LoggerFactory.getLogger(RetryInvocationHandler - .class), org.slf4j.event.Level.DEBUG); - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + GenericTestUtils.setLogLevel(LoggerFactory.getLogger( + RetryInvocationHandler.class), Level.DEBUG); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } protected static final Logger LOG = LoggerFactory.getLogger( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyBlockManagement.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyBlockManagement.java index 0e60ee03218..74c6f212408 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyBlockManagement.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyBlockManagement.java @@ -29,8 +29,8 @@ import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil; import org.apache.hadoop.hdfs.server.namenode.NameNode; -import org.apache.log4j.Level; import org.junit.Test; +import org.slf4j.event.Level; import static org.junit.Assert.assertEquals; @@ -46,7 +46,7 @@ public class TestStandbyBlockManagement { private static final Path TEST_FILE_PATH = new Path(TEST_FILE); static { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } @Test(timeout=60000) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyIsHot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyIsHot.java index 73f15cf9328..b45e22614c3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyIsHot.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestStandbyIsHot.java @@ -39,9 +39,9 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Test; +import org.slf4j.event.Level; import java.util.function.Supplier; @@ -57,7 +57,7 @@ public class TestStandbyIsHot { private static final Path TEST_FILE_PATH = new Path(TEST_FILE); static { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } 
@Test(timeout=60000) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java index dcb6ddc00a6..1621cd386c1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java @@ -28,11 +28,11 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; -import org.apache.log4j.Level; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.slf4j.event.Level; /** * Test WebHDFS files/directories creation to make sure it follows same rules @@ -42,7 +42,7 @@ public class TestWebHdfsCreatePermissions { static final Logger LOG = LoggerFactory.getLogger(TestWebHdfsCreatePermissions.class); { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } private MiniDFSCluster cluster; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java index e009bc6032c..640994562e6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java @@ -45,11 +45,11 @@ import org.apache.hadoop.hdfs.web.resources.GetOpParam; import org.apache.hadoop.hdfs.web.resources.PostOpParam; import org.apache.hadoop.hdfs.web.resources.PutOpParam; -import org.apache.log4j.Level; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.slf4j.event.Level; /** * Test WebHDFS which provides data locality using HTTP redirection. @@ -58,7 +58,7 @@ public class TestWebHdfsDataLocality { static final Logger LOG = LoggerFactory.getLogger(TestWebHdfsDataLocality.class); { - DFSTestUtil.setNameNodeLogLevel(Level.ALL); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } private static final String RACK0 = "/rack0"; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithMultipleNameNodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithMultipleNameNodes.java index 570123d8aa4..119dc50c3bf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithMultipleNameNodes.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsWithMultipleNameNodes.java @@ -48,7 +48,7 @@ static private void setLogLevel() { GenericTestUtils.setLogLevel(LOG, Level.TRACE); GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.TRACE); - DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE); + DFSTestUtil.setNameNodeLogLevel(Level.TRACE); } private static final Configuration conf = new HdfsConfiguration();
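For reference, the replacement idiom that the hunks above converge on can be summarized in one static initializer. This is a minimal illustrative sketch, not part of the patch: the class name and the "org.example.noisy" logger name are hypothetical, and it assumes only the helpers already visible in the hunks (GenericTestUtils.setLogLevel, GenericTestUtils.disableLog, GenericTestUtils.setRootLogLevel) together with org.slf4j.event.Level, which defines TRACE, DEBUG, INFO, WARN and ERROR but has no ALL, OFF or FATAL.

import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;

// Hypothetical example class, for illustration only; not part of the patch.
public class LogLevelMigrationExample {
  private static final Logger LOG =
      LoggerFactory.getLogger(LogLevelMigrationExample.class);

  static {
    // log4j Level.ALL has no slf4j equivalent; TRACE is the most verbose slf4j level.
    GenericTestUtils.setLogLevel(LOG, Level.TRACE);
    // log4j Level.OFF is replaced by disabling the logger outright.
    GenericTestUtils.disableLog(LoggerFactory.getLogger("org.example.noisy"));
    // Root-logger adjustments go through the helper rather than
    // org.apache.log4j.Logger.getRootLogger().setLevel(...).
    GenericTestUtils.setRootLogLevel(Level.INFO);
    // log4j Level.FATAL maps to ERROR, the most severe slf4j level.
  }
}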