HDFS-9130. Use GenericTestUtils#setLogLevel to set the logging level. Contributed by Mingliang Liu.

Haohui Mai 2015-09-23 23:59:19 -07:00
parent fbb1d923f4
commit 7e4bd11b5b
60 changed files with 155 additions and 183 deletions
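Every hunk in this commit makes the same substitution: instead of casting a commons-logging Log to Log4JLogger at each call site, tests delegate to the org.apache.hadoop.test.GenericTestUtils helper. As a rough sketch (an illustration of the idea, not the verbatim Hadoop source), the helper can centralize the cast in one place, assuming the Log is backed by a log4j binding:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Level;

public abstract class GenericTestUtils {
  /** Set the log4j level behind a commons-logging Log in one place. */
  public static void setLogLevel(Log log, Level level) {
    ((Log4JLogger) log).getLogger().setLevel(level);
  }
}

Call sites then shrink from ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL); to GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);, and the Log4JLogger import disappears from each test.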

View File

@@ -602,6 +602,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8733. Keep server related definition in hdfs.proto on server side.
     (Mingliang Liu via wheat9)
 
+    HDFS-9130. Use GenericTestUtils#setLogLevel to set the logging level.
+    (Mingliang Liu via wheat9)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

View File

@@ -24,12 +24,12 @@ import java.util.concurrent.atomic.*;
 import org.apache.log4j.Level;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.*;
 import org.apache.hadoop.hdfs.*;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
@@ -50,7 +50,7 @@ public class TestFuseDFS {
   private static final Log LOG = LogFactory.getLog(TestFuseDFS.class);
   {
-    ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.ALL);
   }
   /** Dump the given intput stream to stderr */

View File

@@ -24,7 +24,6 @@ import java.util.Random;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fi.FiTestUtil;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -235,14 +234,14 @@ public class TestFiPipelines {
   }
   private static void initLoggers() {
-    ((Log4JLogger) NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) TestFiPipelines.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) FiTestUtil.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) BlockReceiverAspects.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DFSClientAspects.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
+    GenericTestUtils.setLogLevel(LogFactory.getLog(FSNamesystem.class), Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(TestFiPipelines.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FiTestUtil.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockReceiverAspects.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClientAspects.LOG, Level.ALL);
   }
 }

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.datanode;
 import java.io.IOException;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fi.DataTransferTestUtil;
 import org.apache.hadoop.fi.DataTransferTestUtil.DataTransferTest;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -64,7 +64,7 @@ public class TestFiDataTransferProtocol {
   }
   {
-    ((Log4JLogger)DataTransferProtocol.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
   }
   /**

View File

@@ -40,6 +40,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -72,9 +73,9 @@ public class TestFiDataTransferProtocol2 {
   }
   {
-    ((Log4JLogger) BlockReceiver.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DataTransferProtocol.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(BlockReceiver.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
   }
   /**
    * 1. create files with dfs

View File

@@ -29,9 +29,9 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -101,7 +101,7 @@ public class TestFcHdfsSetUMask {
   {
     try {
-      ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.DEBUG);
+      GenericTestUtils.setLogLevel(FileSystem.LOG, Level.DEBUG);
     }
     catch(Exception e) {
       System.out.println("Cannot change log level\n"

View File

@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.ChecksumFileSystem;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -173,7 +174,7 @@ public class BenchmarkThroughput extends Configured implements Tool {
     // silence the minidfs cluster
     Log hadoopLog = LogFactory.getLog("org");
     if (hadoopLog instanceof Log4JLogger) {
-      ((Log4JLogger) hadoopLog).getLogger().setLevel(Level.WARN);
+      GenericTestUtils.setLogLevel(hadoopLog, Level.WARN);
     }
     int reps = 1;
     if (args.length == 1) {

View File

@@ -50,7 +50,6 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -879,7 +878,7 @@ public class TestDFSClientRetries {
   public static void namenodeRestartTest(final Configuration conf,
       final boolean isWebHDFS) throws Exception {
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
     final List<Exception> exceptions = new ArrayList<Exception>();

View File

@@ -45,9 +45,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
 import java.util.Set;
 import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.BlockStorageLocation;
@@ -99,7 +97,7 @@ public class TestDistributedFileSystem {
   private static final Random RAN = new Random();
   static {
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
   }
   private boolean dualPortTesting = false;
@@ -517,7 +515,7 @@ public class TestDistributedFileSystem {
   @Test
   public void testFileChecksum() throws Exception {
-    ((Log4JLogger)HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(HftpFileSystem.LOG, Level.ALL);
     final long seed = RAN.nextLong();
     System.out.println("seed=" + seed);
@@ -780,9 +778,9 @@ public class TestDistributedFileSystem {
   @Test(timeout=60000)
   public void testGetFileBlockStorageLocationsBatching() throws Exception {
     final Configuration conf = getTestConfiguration();
-    ((Log4JLogger)ProtobufRpcEngine.LOG).getLogger().setLevel(Level.TRACE);
-    ((Log4JLogger)BlockStorageLocationUtil.LOG).getLogger().setLevel(Level.TRACE);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockStorageLocationUtil.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
     conf.setBoolean(DFSConfigKeys.DFS_HDFS_BLOCKS_METADATA_ENABLED,
         true);

View File

@@ -26,8 +26,6 @@ import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ChecksumException;
@@ -38,6 +36,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -63,9 +62,9 @@ public class TestFileConcurrentReader {
       Logger.getLogger(TestFileConcurrentReader.class);
   {
-    ((Log4JLogger) LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
   }
   static final long seed = 0xDEADBEEFL;

View File

@@ -52,8 +52,6 @@ import java.net.UnknownHostException;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CreateFlag;
@@ -100,10 +98,9 @@ public class TestFileCreation {
   static final String DIR = "/" + TestFileCreation.class.getSimpleName() + "/";
   {
-    //((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
   }
   private static final String RPC_DETAILED_METRICS =
       "RpcDetailedActivityForPort";

View File

@@ -18,8 +18,6 @@
 package org.apache.hadoop.hdfs;
 import static org.junit.Assert.assertEquals;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -30,6 +28,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -41,10 +40,10 @@ public class TestFileCreationClient {
   static final String DIR = "/" + TestFileCreationClient.class.getSimpleName() + "/";
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)InterDatanodeProtocol.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, Level.ALL);
   }
   /** Test lease recovery Triggered by DFSClient. */

View File

@@ -27,8 +27,6 @@ import java.io.IOException;
 import java.util.Random;
 import java.util.concurrent.TimeoutException;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileContext;
@@ -42,6 +40,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.web.HftpFileSystem;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -52,8 +51,8 @@ import org.junit.Test;
  */
 public class TestFileStatus {
   {
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FileSystem.LOG, Level.ALL);
   }
   static final long seed = 0xDEADBEEFL;

View File

@@ -25,7 +25,6 @@ import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.EnumSet;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -35,6 +34,7 @@ import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -42,8 +42,8 @@ import org.junit.Test;
  * newly introduced {@link FSDataOutputStream#hflush()} method */
 public class TestHFlush {
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
   }
   private final String fName = "hflushtest.dat";

View File

@@ -39,11 +39,11 @@ import org.junit.Test;
 public class TestLargeBlock {
 /**
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)TestLargeBlock.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(TestLargeBlock.LOG, Level.ALL);
   }
 */
   private static final Log LOG = LogFactory.getLog(TestLargeBlock.class);

View File

@@ -28,7 +28,6 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -47,6 +46,7 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -58,9 +58,9 @@ public class TestLeaseRecovery2 {
   public static final Log LOG = LogFactory.getLog(TestLeaseRecovery2.class);
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
   }
   static final private long BLOCK_SIZE = 1024;

View File

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hdfs;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.TestListFiles;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -30,7 +30,7 @@ import org.junit.BeforeClass;
  */
 public class TestListFilesInDFS extends TestListFiles {
   {
-    ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FileSystem.LOG, Level.ALL);
   }

View File

@@ -25,7 +25,6 @@ import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Random;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -36,6 +35,7 @@ import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -47,7 +47,7 @@ import org.junit.Test;
  */
 public class TestListFilesInFileContext {
   {
-    ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FileSystem.LOG, Level.ALL);
   }
   static final long seed = 0xDEADBEEFL;

View File

@@ -26,7 +26,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.Random;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -53,8 +52,8 @@ import org.junit.Test;
  */
 public class TestPersistBlocks {
   static {
-    ((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
   }
   private static final int BLOCK_SIZE = 4096;

View File

@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.io.OutputStream;
 import java.security.PrivilegedExceptionAction;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -33,6 +31,7 @@ import org.apache.hadoop.hdfs.protocol.RecoveryInProgressException;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -40,8 +39,8 @@ import org.junit.Test;
 /** Test reading from hdfs while a file is being written. */
 public class TestReadWhileWriting {
   {
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
   }
   private static final String DIR = "/"

View File

@@ -37,7 +37,6 @@ import java.util.concurrent.ExecutorService;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdfs.qjournal.MiniJournalCluster;
@@ -209,7 +208,7 @@ public class TestQJMWithFaults {
       // If the user specifies a seed, then we should gather all the
       // IPC trace information so that debugging is easier. This makes
       // the test run about 25% slower otherwise.
-      ((Log4JLogger)ProtobufRpcEngine.LOG).getLogger().setLevel(Level.ALL);
+      GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.ALL);
     } else {
       seed = new Random().nextLong();
     }

View File

@@ -41,7 +41,6 @@ import java.util.concurrent.ExecutorService;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdfs.qjournal.MiniJournalCluster;
@@ -84,7 +83,7 @@ public class TestQuorumJournalManager {
   private final List<QuorumJournalManager> toClose = Lists.newLinkedList();
   static {
-    ((Log4JLogger)ProtobufRpcEngine.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.ALL);
   }
   @Before

View File

@@ -27,7 +27,6 @@ import java.util.List;
 import org.junit.Assert;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.qjournal.client.AsyncLogger;
 import org.apache.hadoop.hdfs.qjournal.client.QuorumException;
@@ -56,7 +55,7 @@ import static org.apache.hadoop.hdfs.qjournal.QJMTestUtil.writeOp;
 */
 public class TestQuorumJournalManagerUnit {
   static {
-    ((Log4JLogger)QuorumJournalManager.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(QuorumJournalManager.LOG, Level.ALL);
   }
   private static final NamespaceInfo FAKE_NSINFO = new NamespaceInfo(
       12345, "mycluster", "my-bp", 0L);

View File

@@ -26,7 +26,6 @@ import java.security.PrivilegedExceptionAction;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -44,6 +43,7 @@ import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Test;
@@ -62,11 +62,11 @@ public class TestClientProtocolWithDelegationToken {
   }
   static {
-    ((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(Client.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(Server.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.ALL);
   }
   @Test

View File

@@ -30,7 +30,6 @@ import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -54,6 +53,7 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
@@ -172,7 +172,7 @@ public class TestDelegationToken {
   @SuppressWarnings("deprecation")
   @Test
   public void testDelegationTokenWebHdfsApi() throws Exception {
-    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
     final String uri = WebHdfsConstants.WEBHDFS_SCHEME + "://"
         + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
     //get file system as JobTracker

View File

@@ -36,7 +36,6 @@ import java.util.Set;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -70,6 +69,7 @@ import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.log4j.Level;
 import org.junit.Assert;
@@ -89,11 +89,11 @@ public class TestBlockToken {
   private static final String ADDRESS = "0.0.0.0";
   static {
-    ((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(Client.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(Server.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.ALL);
   }
   /** Directory where we can count our open file descriptors under Linux */

View File

@@ -51,7 +51,6 @@ import java.util.concurrent.TimeoutException;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -94,7 +93,7 @@ public class TestBalancer {
   private static final Log LOG = LogFactory.getLog(TestBalancer.class);
   static {
-    ((Log4JLogger)Balancer.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(Balancer.LOG, Level.ALL);
   }
   final static long CAPACITY = 5000L;

View File

@@ -31,7 +31,6 @@ import java.util.Set;
 import java.util.concurrent.TimeoutException;
 import org.apache.commons.logging.Log;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -50,6 +49,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.server.balancer.BalancerParameters;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -60,7 +60,7 @@ import org.junit.Test;
 public class TestBalancerWithMultipleNameNodes {
   static final Log LOG = Balancer.LOG;
   {
-    ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.ALL);
     DFSTestUtil.setNameNodeLogLevel(Level.ALL);
   }

View File

@@ -28,7 +28,6 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Random;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -63,6 +62,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -77,7 +77,7 @@ public class TestBlockTokenWithDFS {
   private final byte[] rawData = new byte[FILE_SIZE];
   {
-    ((Log4JLogger) DFSClient.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
     Random r = new Random();
     r.nextBytes(rawData);
   }

View File

@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileSystem;
@@ -39,14 +38,15 @@ import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Test;
 public class TestBlocksWithNotEnoughRacks {
   public static final Log LOG = LogFactory.getLog(TestBlocksWithNotEnoughRacks.class);
   static {
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.ALL);
   }
   /*

View File

@@ -37,7 +37,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
@@ -61,6 +60,7 @@ import org.apache.hadoop.hdfs.server.namenode.Namesystem;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -72,7 +72,7 @@ import org.junit.rules.ExpectedException;
 public class TestReplicationPolicy {
   {
-    ((Log4JLogger)BlockPlacementPolicy.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
   }
   private static final int BLOCK_SIZE = 1024;

View File

@@ -33,7 +33,6 @@ import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.hdfs.DFSTestUtil;
@@ -87,7 +86,7 @@ public class TestBPOfferService {
   private long secondCallTime = 0;
   static {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
   }
   private DatanodeProtocolClientSideTranslatorPB mockNN1;

View File

@@ -44,7 +44,6 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -114,8 +113,8 @@ public class TestBlockRecovery {
       BLOCK_ID, BLOCK_LEN, GEN_STAMP);
   static {
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.ALL);
   }
   /**

View File

@@ -30,7 +30,6 @@ import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.ReconfigurationException;
 import org.apache.hadoop.fs.FileSystem;
@@ -45,6 +44,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
@@ -57,7 +57,8 @@ public class TestDataNodeVolumeFailureReporting {
   private static final Log LOG = LogFactory.getLog(TestDataNodeVolumeFailureReporting.class);
   {
-    ((Log4JLogger)TestDataNodeVolumeFailureReporting.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(TestDataNodeVolumeFailureReporting.LOG,
+        Level.ALL);
   }
   private FileSystem fs;

View File

@@ -32,7 +32,6 @@ import java.util.concurrent.ThreadLocalRandom;
 import com.google.common.base.Supplier;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -83,7 +82,7 @@ public class TestDatanodeProtocolRetryPolicy {
       DFSTestUtil.getLocalDatanodeRegistration();
   static {
-    ((Log4JLogger)LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.ALL);
   }
   /**

View File

@@ -22,7 +22,6 @@ import java.util.Random;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSClientAdapter;
@@ -38,6 +37,7 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -47,7 +47,7 @@ public class TestTransferRbw {
   private static final Log LOG = LogFactory.getLog(TestTransferRbw.class);
   {
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
   }
   private static final Random RAN = new Random();

View File

@@ -22,7 +22,6 @@ import com.google.common.base.Supplier;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 import static org.hamcrest.core.Is.is;
@@ -93,8 +92,8 @@ public class TestSpaceReservation {
   }
   static {
-    ((Log4JLogger) FsDatasetImpl.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger) DataNode.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
  }
   /**

View File

@@ -28,7 +28,6 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -62,6 +61,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
@@ -77,12 +77,11 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LAZY_WRITER_INTE
 public class TestStorageMover {
   static final Log LOG = LogFactory.getLog(TestStorageMover.class);
   static {
-    ((Log4JLogger)LogFactory.getLog(BlockPlacementPolicy.class)
-        ).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(Dispatcher.class)
-        ).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)LogFactory.getLog(DataTransferProtocol.class)).getLogger()
-        .setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LogFactory.getLog(BlockPlacementPolicy.class),
+        Level.ALL);
+    GenericTestUtils.setLogLevel(LogFactory.getLog(Dispatcher.class),
+        Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
   }
   private static final int BLOCK_SIZE = 1024;

View File

@@ -22,11 +22,11 @@ import com.google.common.base.Joiner;
 import com.google.common.base.Optional;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.DefaultAuditLogger;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.log4j.Level;
 import org.junit.Rule;
 import org.junit.Test;
@@ -60,7 +60,7 @@ public class TestAuditLogAtDebug {
           Joiner.on(",").join(debugCommands.get()));
     }
     logger.initialize(conf);
-    ((Log4JLogger) FSNamesystem.auditLog).getLogger().setLevel(level);
+    GenericTestUtils.setLogLevel(FSNamesystem.auditLog, level);
     return spy(logger);
   }

View File

@@ -30,7 +30,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -62,8 +61,8 @@ public class TestBackupNode {
   static {
-    ((Log4JLogger)Checkpointer.LOG).getLogger().setLevel(Level.ALL);
-    ((Log4JLogger)BackupImage.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(Checkpointer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BackupImage.LOG, Level.ALL);
   }
   static final String BASE_DIR = MiniDFSCluster.getBaseDirectory();

View File

@@ -48,7 +48,6 @@ import com.google.common.io.Files;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -110,7 +109,7 @@ import com.google.common.primitives.Ints;
 public class TestCheckpoint {
   static {
-    ((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
   }
   static final Log LOG = LogFactory.getLog(TestCheckpoint.class);


@ -56,7 +56,6 @@ import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumException; import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -102,7 +101,7 @@ import com.google.common.collect.Lists;
public class TestEditLog { public class TestEditLog {
static { static {
((Log4JLogger)FSEditLog.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
} }
/** /**


@ -32,7 +32,6 @@ import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
@ -47,6 +46,7 @@ import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.namenode.JournalSet.JournalAndStream; import org.apache.hadoop.hdfs.server.namenode.JournalSet.JournalAndStream;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Time;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.Test; import org.junit.Test;
@ -59,7 +59,7 @@ import org.mockito.stubbing.Answer;
*/ */
public class TestEditLogRace { public class TestEditLogRace {
static { static {
((Log4JLogger)FSEditLog.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
} }
private static final Log LOG = LogFactory.getLog(TestEditLogRace.class); private static final Log LOG = LogFactory.getLog(TestEditLogRace.class);


@ -35,7 +35,6 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.SortedMap; import java.util.SortedMap;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -48,6 +47,7 @@ import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.EditLogValidation; import org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.EditLogValidation;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils; import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.Test; import org.junit.Test;
@ -58,8 +58,8 @@ import com.google.common.io.Files;
public class TestFSEditLogLoader { public class TestFSEditLogLoader {
static { static {
((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
((Log4JLogger)FSEditLogLoader.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSEditLogLoader.LOG, Level.ALL);
} }
private static final File TEST_DIR = PathUtils.getTestDir(TestFSEditLogLoader.class); private static final File TEST_DIR = PathUtils.getTestDir(TestFSEditLogLoader.class);


@ -27,7 +27,6 @@ import java.util.EnumSet;
import java.util.List; import java.util.List;
import java.util.Random; import java.util.Random;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@ -46,6 +45,7 @@ import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeat
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper; import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper;
import org.apache.hadoop.hdfs.util.Canceler; import org.apache.hadoop.hdfs.util.Canceler;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.After; import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
@ -58,7 +58,7 @@ import org.junit.Test;
public class TestFSImageWithSnapshot { public class TestFSImageWithSnapshot {
{ {
SnapshotTestHelper.disableLogs(); SnapshotTestHelper.disableLogs();
((Log4JLogger)INode.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(INode.LOG, Level.ALL);
} }
static final long seed = 0; static final long seed = 0;


@ -30,7 +30,6 @@ import java.util.EnumSet;
import java.util.Random; import java.util.Random;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.CreateFlag;
@ -43,6 +42,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
@ -52,7 +52,8 @@ import org.junit.Test;
public class TestFavoredNodesEndToEnd { public class TestFavoredNodesEndToEnd {
{ {
((Log4JLogger)LogFactory.getLog(BlockPlacementPolicy.class)).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(LogFactory.getLog(BlockPlacementPolicy.class),
Level.ALL);
} }
private static MiniDFSCluster cluster; private static MiniDFSCluster cluster;


@ -92,6 +92,7 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@ -134,12 +135,12 @@ public class TestFsck {
throws Exception { throws Exception {
ByteArrayOutputStream bStream = new ByteArrayOutputStream(); ByteArrayOutputStream bStream = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bStream, true); PrintStream out = new PrintStream(bStream, true);
((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSPermissionChecker.LOG, Level.ALL);
int errCode = ToolRunner.run(new DFSck(conf, out), path); int errCode = ToolRunner.run(new DFSck(conf, out), path);
if (checkErrorCode) { if (checkErrorCode) {
assertEquals(expectedErrCode, errCode); assertEquals(expectedErrCode, errCode);
} }
((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.INFO); GenericTestUtils.setLogLevel(FSPermissionChecker.LOG, Level.INFO);
FSImage.LOG.error("OUTPUT = " + bStream.toString()); FSImage.LOG.error("OUTPUT = " + bStream.toString());
return bStream.toString(); return bStream.toString();
} }
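The runFsck change keeps the original raise-then-restore sequence (ALL before ToolRunner.run, INFO after). A hypothetical try/finally wrapper, not part of this commit, would restore the level even when the tool throws:

import java.util.concurrent.Callable;
import org.apache.commons.logging.Log;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;

public final class ScopedLogLevel {
  // Hypothetical helper mirroring runFsck's ALL -> INFO sequence, with
  // the restore moved into finally so a throwing body cannot leave the
  // logger stuck at ALL.
  public static <T> T atLevel(Log log, Level tmp, Callable<T> body)
      throws Exception {
    GenericTestUtils.setLogLevel(log, tmp);
    try {
      return body.call();
    } finally {
      GenericTestUtils.setLogLevel(log, Level.INFO);
    }
  }
}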


@ -37,7 +37,6 @@ import java.util.concurrent.Future;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -79,7 +78,7 @@ import org.mockito.stubbing.Answer;
*/ */
public class TestSaveNamespace { public class TestSaveNamespace {
static { static {
((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
} }
private static final Log LOG = LogFactory.getLog(TestSaveNamespace.class); private static final Log LOG = LogFactory.getLog(TestSaveNamespace.class);


@ -20,8 +20,6 @@ package org.apache.hadoop.hdfs.server.namenode.ha;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -29,6 +27,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.io.retry.RetryInvocationHandler;
import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread; import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
@ -46,11 +45,9 @@ import com.google.common.base.Supplier;
*/ */
public class TestDNFencingWithReplication { public class TestDNFencingWithReplication {
static { static {
((Log4JLogger)FSNamesystem.auditLog).getLogger().setLevel(Level.WARN); GenericTestUtils.setLogLevel(FSNamesystem.auditLog, Level.WARN);
((Log4JLogger)Server.LOG).getLogger().setLevel(Level.FATAL); GenericTestUtils.setLogLevel(Server.LOG, Level.FATAL);
((Log4JLogger)LogFactory.getLog( GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.FATAL);
"org.apache.hadoop.io.retry.RetryInvocationHandler"))
.getLogger().setLevel(Level.FATAL);
} }
private static final int NUM_THREADS = 20; private static final int NUM_THREADS = 20;
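Besides the mechanical rewrite, this hunk also replaces a string-based logger lookup with the class's own LOG field, which is why the RetryInvocationHandler import appears above. Both spellings address the same underlying log4j logger; only the field reference is checked at compile time (this assumes RetryInvocationHandler.LOG is visible to the test, as the new import suggests):

// Old spelling: resolved by name at runtime; silently targets a dead
// logger if the class is ever renamed or repackaged.
GenericTestUtils.setLogLevel(
    LogFactory.getLog("org.apache.hadoop.io.retry.RetryInvocationHandler"),
    Level.FATAL);
// New spelling: same logger, but the compiler verifies the reference.
GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.FATAL);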


@ -23,7 +23,6 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.fs.permission.PermissionStatus;
@ -51,8 +50,8 @@ public class TestEditLogTailer {
static final long NN_LAG_TIMEOUT = 10 * 1000; static final long NN_LAG_TIMEOUT = 10 * 1000;
static { static {
((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
((Log4JLogger)EditLogTailer.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(EditLogTailer.LOG, Level.ALL);
} }
@Test @Test


@ -23,8 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.PrintStream; import java.io.PrintStream;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@ -32,6 +30,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.tools.DFSck; import org.apache.hadoop.hdfs.tools.DFSck;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.Test; import org.junit.Test;
@ -39,7 +38,7 @@ import org.junit.Test;
public class TestHAFsck { public class TestHAFsck {
static { static {
((Log4JLogger)LogFactory.getLog(DFSUtil.class)).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(DFSUtil.LOG, Level.ALL);
} }
/** /**


@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.server.namenode.ha;
import com.google.common.util.concurrent.Uninterruptibles; import com.google.common.util.concurrent.Uninterruptibles;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -74,7 +73,7 @@ public class TestHAStateTransitions {
RequestSource.REQUEST_BY_USER_FORCED); RequestSource.REQUEST_BY_USER_FORCED);
static { static {
((Log4JLogger)EditLogTailer.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(EditLogTailer.LOG, Level.ALL);
} }
/** /**


@ -31,7 +31,6 @@ import com.google.common.collect.ImmutableList;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -57,6 +56,7 @@ import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.MetricsSource; import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.MetricsAsserts; import org.apache.hadoop.test.MetricsAsserts;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.After; import org.junit.After;
@ -93,8 +93,8 @@ public class TestNameNodeMetrics {
"" + PERCENTILES_INTERVAL); "" + PERCENTILES_INTERVAL);
// Enable stale DataNodes checking // Enable stale DataNodes checking
CONF.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY, true); CONF.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY, true);
((Log4JLogger)LogFactory.getLog(MetricsAsserts.class)) GenericTestUtils.setLogLevel(LogFactory.getLog(MetricsAsserts.class),
.getLogger().setLevel(Level.DEBUG); Level.DEBUG);
} }
private MiniDFSCluster cluster; private MiniDFSCluster cluster;


@ -26,7 +26,6 @@ import java.util.EnumSet;
import java.util.List; import java.util.List;
import java.util.Random; import java.util.Random;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@ -48,6 +47,7 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodeFile; import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@ -58,7 +58,7 @@ import org.junit.Test;
*/ */
public class TestINodeFileUnderConstructionWithSnapshot { public class TestINodeFileUnderConstructionWithSnapshot {
{ {
((Log4JLogger)INode.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(INode.LOG, Level.ALL);
SnapshotTestHelper.disableLogs(); SnapshotTestHelper.disableLogs();
} }


@ -34,7 +34,6 @@ import java.util.HashMap;
import java.util.Random; import java.util.Random;
import org.apache.commons.io.output.NullOutputStream; import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileStatus;
@ -74,7 +73,7 @@ import org.junit.rules.ExpectedException;
*/ */
public class TestSnapshot { public class TestSnapshot {
{ {
((Log4JLogger)INode.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(INode.LOG, Level.ALL);
SnapshotTestHelper.disableLogs(); SnapshotTestHelper.disableLogs();
} }


@ -28,7 +28,6 @@ import java.io.PrintStream;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAAdmin; import org.apache.hadoop.ha.HAAdmin;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@ -37,6 +36,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter; import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.After; import org.junit.After;
@ -52,7 +52,7 @@ import com.google.common.io.Files;
*/ */
public class TestDFSHAAdminMiniCluster { public class TestDFSHAAdminMiniCluster {
static { static {
((Log4JLogger)LogFactory.getLog(HAAdmin.class)).getLogger().setLevel( GenericTestUtils.setLogLevel(LogFactory.getLog(HAAdmin.class),
Level.ALL); Level.ALL);
} }
private static final Log LOG = LogFactory.getLog(TestDFSHAAdminMiniCluster.class); private static final Log LOG = LogFactory.getLog(TestDFSHAAdminMiniCluster.class);


@ -22,7 +22,6 @@ import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
@ -34,12 +33,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.util.ByteArrayManager.Counter; import org.apache.hadoop.hdfs.util.ByteArrayManager.Counter;
import org.apache.hadoop.hdfs.util.ByteArrayManager.CounterMap; import org.apache.hadoop.hdfs.util.ByteArrayManager.CounterMap;
import org.apache.hadoop.hdfs.util.ByteArrayManager.FixedLengthManager; import org.apache.hadoop.hdfs.util.ByteArrayManager.FixedLengthManager;
import org.apache.hadoop.hdfs.util.ByteArrayManager.ManagerMap; import org.apache.hadoop.hdfs.util.ByteArrayManager.ManagerMap;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time; import org.apache.hadoop.util.Time;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.Assert; import org.junit.Assert;
@ -50,8 +49,8 @@ import org.junit.Test;
*/ */
public class TestByteArrayManager { public class TestByteArrayManager {
static { static {
((Log4JLogger)LogFactory.getLog(ByteArrayManager.class) GenericTestUtils.setLogLevel(LogFactory.getLog(ByteArrayManager.class),
).getLogger().setLevel(Level.ALL); Level.ALL);
} }
static final Log LOG = LogFactory.getLog(TestByteArrayManager.class); static final Log LOG = LogFactory.getLog(TestByteArrayManager.class);
@ -560,8 +559,8 @@ public class TestByteArrayManager {
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
((Log4JLogger)LogFactory.getLog(ByteArrayManager.class) GenericTestUtils.setLogLevel(LogFactory.getLog(ByteArrayManager.class),
).getLogger().setLevel(Level.OFF); Level.OFF);
final int arrayLength = 64 * 1024; //64k final int arrayLength = 64 * 1024; //64k
final int nThreads = 512; final int nThreads = 512;


@ -27,7 +27,6 @@ import java.net.URI;
import java.net.URL; import java.net.URL;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSMainOperationsBaseTest; import org.apache.hadoop.fs.FSMainOperationsBaseTest;
@ -45,6 +44,7 @@ import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.hdfs.web.resources.HttpOpParam; import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.Assert; import org.junit.Assert;
@ -53,7 +53,7 @@ import org.junit.Test;
public class TestFSMainOperationsWebHdfs extends FSMainOperationsBaseTest { public class TestFSMainOperationsWebHdfs extends FSMainOperationsBaseTest {
{ {
((Log4JLogger)ExceptionHandler.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(ExceptionHandler.LOG, Level.ALL);
} }
private static MiniDFSCluster cluster = null; private static MiniDFSCluster cluster = null;


@ -36,7 +36,6 @@ import java.util.Random;
import org.apache.commons.io.IOUtils; import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
@ -46,7 +45,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
@ -237,7 +235,7 @@ public class TestWebHDFS {
/** Test client retry with namenode restarting. */ /** Test client retry with namenode restarting. */
@Test(timeout=300000) @Test(timeout=300000)
public void testNamenodeRestart() throws Exception { public void testNamenodeRestart() throws Exception {
((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
final Configuration conf = WebHdfsTestUtil.createConf(); final Configuration conf = WebHdfsTestUtil.createConf();
TestDFSClientRetries.namenodeRestartTest(conf, true); TestDFSClientRetries.namenodeRestartTest(conf, true);
} }


@ -21,7 +21,6 @@ import java.net.InetSocketAddress;
import java.net.URI; import java.net.URI;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
@ -47,7 +46,7 @@ public class TestWebHdfsWithMultipleNameNodes {
static final Log LOG = WebHdfsTestUtil.LOG; static final Log LOG = WebHdfsTestUtil.LOG;
static private void setLogLevel() { static private void setLogLevel() {
((Log4JLogger)LOG).getLogger().setLevel(Level.ALL); GenericTestUtils.setLogLevel(LOG, Level.ALL);
GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL); GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.ALL);
DFSTestUtil.setNameNodeLogLevel(Level.ALL); DFSTestUtil.setNameNodeLogLevel(Level.ALL);