HDFS-15834. Remove the usage of org.apache.log4j.Level (#2696)
commit 0ed9e36282
parent 4cf3531583
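Every file below follows the same migration: drop the org.apache.log4j.Level import (plus LogManager/Logger where they were only used to set levels), import org.slf4j.event.Level instead, and adjust levels through GenericTestUtils. Because the slf4j enum only defines ERROR, WARN, INFO, DEBUG and TRACE, call sites that previously asked for log4j's Level.ALL now ask for Level.TRACE. A minimal before/after sketch (the "MyTest" logger name is only a placeholder, not a class touched by this patch):

    import org.apache.hadoop.test.GenericTestUtils;
    import org.slf4j.LoggerFactory;
    import org.slf4j.event.Level;

    // Before (log4j API, removed by this change):
    //   org.apache.log4j.Logger.getLogger("MyTest").setLevel(org.apache.log4j.Level.ALL);
    // After: stay on the slf4j facade and let the test helper translate the level.
    GenericTestUtils.setLogLevel(LoggerFactory.getLogger("MyTest"), Level.TRACE);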
@@ -39,7 +39,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -57,6 +56,7 @@ import static org.mockito.Mockito.when;
 import static org.mockito.Mockito.mock;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
+import org.slf4j.event.Level;
 
 public class TestRequestHedgingProxyProvider {
 
@@ -24,11 +24,11 @@ import org.apache.hadoop.hdfs.util.ByteArrayManager.FixedLengthManager;
 import org.apache.hadoop.hdfs.util.ByteArrayManager.ManagerMap;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 public class TestByteArrayManager {
   static {
     GenericTestUtils.setLogLevel(
-        LoggerFactory.getLogger(ByteArrayManager.class), Level.ALL);
+        LoggerFactory.getLogger(ByteArrayManager.class), Level.TRACE);
   }
 
   static final Logger LOG = LoggerFactory.getLogger(TestByteArrayManager.class);
@@ -559,8 +559,8 @@ public class TestByteArrayManager {
   }
 
   public static void main(String[] args) throws Exception {
-    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(ByteArrayManager.class),
-        Level.OFF);
+    GenericTestUtils.disableLog(
+        LoggerFactory.getLogger(ByteArrayManager.class));
     final int arrayLength = 64 * 1024; //64k
     final int nThreads = 512;
     final int nAllocations = 1 << 15;
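slf4j likewise has no Level.OFF, which is presumably why the benchmark main() above now silences the logger with GenericTestUtils.disableLog(...) instead of setLogLevel(..., Level.OFF). The same idea in isolation (the logger name here is a placeholder):

    // Silence a chatty logger for a benchmark run; the slf4j Level enum has no OFF value.
    GenericTestUtils.disableLog(LoggerFactory.getLogger("SomeChattyClass"));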
@@ -21,7 +21,7 @@ import java.net.URI;
 import java.util.ArrayList;
 import java.util.concurrent.atomic.*;
 
-import org.apache.log4j.Level;
+import org.slf4j.event.Level;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -50,7 +50,7 @@ public class TestFuseDFS {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestFuseDFS.class);
   {
-    GenericTestUtils.setLogLevel(LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.TRACE);
   }
 
   /** Dump the given intput stream to stderr */
@@ -54,14 +54,15 @@ import org.apache.hadoop.hdfs.server.federation.resolver.MountTableResolver;
 import org.apache.hadoop.hdfs.server.federation.resolver.NamenodeStatusReport;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * Test namenodes monitor behavior in the Router.
@@ -300,7 +301,7 @@ public class TestRouterNamenodeMonitoring {
     final org.apache.log4j.Logger logger =
         org.apache.log4j.Logger.getRootLogger();
     logger.addAppender(appender);
-    logger.setLevel(Level.DEBUG);
+    GenericTestUtils.setRootLogLevel(Level.DEBUG);
 
     // Setup and start the Router
     Configuration conf = getNamenodesConfig();
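Where a test tunes the root logger, only the level call moves to the helper; the appender wiring above stays on org.apache.log4j.Logger because attaching appenders is inherently a log4j concern. In isolation:

    // Root-level tuning via the test helper; DEBUG exists in both enums, so the value is unchanged.
    GenericTestUtils.setRootLogLevel(Level.DEBUG);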
@@ -38,10 +38,10 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Test symbolic links in Hdfs.
@@ -49,7 +49,7 @@ import org.junit.Test;
 abstract public class TestSymlinkHdfs extends SymlinkBaseTest {
 
   {
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
+    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.TRACE);
   }
 
   protected static MiniDFSCluster cluster;
@@ -190,12 +190,12 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.VersionInfo;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.apache.hadoop.util.ToolRunner;
 
 import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.slf4j.event.Level;
 
 /** Utilities for HDFS tests */
 public class DFSTestUtil {
@@ -1992,15 +1992,6 @@ public class DFSTestUtil {
     GenericTestUtils.setLogLevel(NameNode.blockStateChangeLog, level);
   }
 
-  public static void setNameNodeLogLevel(org.slf4j.event.Level level) {
-    GenericTestUtils.setLogLevel(FSNamesystem.LOG, level);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, level);
-    GenericTestUtils.setLogLevel(LeaseManager.LOG, level);
-    GenericTestUtils.setLogLevel(NameNode.LOG, level);
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, level);
-    GenericTestUtils.setLogLevel(NameNode.blockStateChangeLog, level);
-  }
-
   /**
    * Get the NamenodeProtocol RPC proxy for the NN associated with this
    * DFSClient object
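With DFSTestUtil's import switched to org.slf4j.event.Level, the unqualified setNameNodeLogLevel(Level) overload already takes the slf4j enum, so the explicitly qualified org.slf4j.event.Level duplicate above appears to be redundant and is deleted. Call sites in the tests are unaffected:

    // Same call sites as before, now resolving to the single slf4j-based overload.
    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);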
@@ -45,13 +45,13 @@ import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.BlockWrite.ReplaceData
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.TestFileTruncate;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.slf4j.event.Level;
 
 /**
  * Test randomly mixing append, snapshot and truncate operations.
@@ -60,7 +60,7 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
  */
 public class TestAppendSnapshotTruncate {
   static {
-    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.ALL);
+    GenericTestUtils.setLogLevel(NameNode.stateChangeLog, Level.TRACE);
   }
   private static final Logger LOG =
       LoggerFactory.getLogger(TestAppendSnapshotTruncate.class);
@@ -90,7 +90,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -98,6 +97,7 @@ import org.mockito.Mockito;
 import org.mockito.internal.stubbing.answers.ThrowsException;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+import org.slf4j.event.Level;
 
 
 /**
@@ -959,7 +959,7 @@ public class TestDFSClientRetries {
 
   public static void namenodeRestartTest(final Configuration conf,
       final boolean isWebHDFS) throws Exception {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
 
     final List<Exception> exceptions = new ArrayList<Exception>();
 
@@ -20,12 +20,12 @@ package org.apache.hadoop.hdfs;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 
 import org.junit.Test;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.net.Socket;
@@ -37,7 +37,7 @@ public class TestDFSClientSocketSize {
   private static final Logger LOG = LoggerFactory.getLogger(
       TestDFSClientSocketSize.class);
   static {
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   /**
@@ -40,7 +40,6 @@ import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
@@ -72,6 +71,7 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Assert;
+import org.slf4j.event.Level;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.permission.AclEntryScope.ACCESS;
@@ -1962,7 +1962,7 @@ public class TestDFSShell {
 
   @Test (timeout = 30000)
   public void testGet() throws IOException {
-    GenericTestUtils.setLogLevel(FSInputChecker.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSInputChecker.LOG, Level.TRACE);
 
     final String fname = "testGet.txt";
     Path root = new Path("/test/get");
@@ -47,20 +47,20 @@ import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
+import org.slf4j.event.Level;
 
 public class TestDFSStripedOutputStream {
   public static final Logger LOG = LoggerFactory.getLogger(
       TestDFSStripedOutputStream.class);
 
   static {
-    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   private ErasureCodingPolicy ecPolicy;
@@ -34,19 +34,18 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests that pipelines survive data node death and recovery.
 */
 public class TestDatanodeDeath {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, org.slf4j
-        .event.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, Level.TRACE);
   }
 
   static final int blockSize = 8192;
@@ -73,13 +73,13 @@ import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStatistics;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.eclipse.jetty.util.ajax.JSON;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the decommissioning of nodes.
@@ -1225,8 +1225,8 @@ public class TestDecommission extends AdminStatesBaseTest {
 
   @Test(timeout=120000)
   public void testBlocksPerInterval() throws Exception {
-    org.apache.log4j.Logger.getLogger(DatanodeAdminManager.class)
-        .setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.TRACE);
     // Turn the blocks per interval way down
     getConf().setInt(
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_BLOCKS_PER_INTERVAL_KEY,
@@ -1327,8 +1327,8 @@ public class TestDecommission extends AdminStatesBaseTest {
 
   @Test(timeout=120000)
   public void testPendingNodes() throws Exception {
-    org.apache.log4j.Logger.getLogger(DatanodeAdminManager.class)
-        .setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.TRACE);
     // Only allow one node to be decom'd at a time
     getConf().setInt(
         DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MAX_CONCURRENT_TRACKED_NODES,
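Per-class log4j calls of the form Logger.getLogger(SomeClass.class).setLevel(...) are rewritten the same way throughout the rest of the patch: look the logger up through the slf4j facade and hand it to the helper, e.g.

    GenericTestUtils.setLogLevel(
        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.TRACE);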
@@ -53,8 +53,6 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -65,12 +63,15 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 @RunWith(Parameterized.class)
 public class TestEncryptedTransfer {
   {
-    LogManager.getLogger(SaslDataTransferServer.class).setLevel(Level.DEBUG);
-    LogManager.getLogger(DataTransferSaslUtil.class).setLevel(Level.DEBUG);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(SaslDataTransferServer.class), Level.DEBUG);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DataTransferSaslUtil.class), Level.DEBUG);
   }
 
   @Rule
@@ -43,6 +43,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 
 import org.apache.hadoop.conf.Configuration;
@@ -103,8 +104,6 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 import org.apache.hadoop.io.Text;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -146,6 +145,9 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 import org.xml.sax.InputSource;
 import org.xml.sax.helpers.DefaultHandler;
 
@@ -153,7 +155,7 @@ import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 
 public class TestEncryptionZones {
-  static final Logger LOG = Logger.getLogger(TestEncryptionZones.class);
+  static final Logger LOG = LoggerFactory.getLogger(TestEncryptionZones.class);
 
   protected Configuration conf;
   private FileSystemTestHelper fsHelper;
@@ -197,7 +199,8 @@ public class TestEncryptionZones {
         2);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
     cluster.waitActive();
-    Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(fs);
     fcWrapper = new FileContextTestWrapper(
@@ -42,8 +42,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the building blocks that are needed to
@@ -52,9 +52,9 @@ import org.junit.Test;
 public class TestFileAppend2 {
 
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   static final int numBlocks = 5;
@@ -46,20 +46,19 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /** This class implements some of tests posted in HADOOP-2658. */
 public class TestFileAppend3 {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, org.slf4j
-        .event.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(InterDatanodeProtocol.LOG, Level.TRACE);
   }
 
   static final long BLOCK_SIZE = 64 * 1024;
@@ -47,9 +47,9 @@ import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /* File Append tests for HDFS-200 & HDFS-142, specifically focused on:
  * using append()/sync() to recover block information
@@ -67,9 +67,9 @@ public class TestFileAppend4 {
   FSDataOutputStream stm;
 
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   @Before
@@ -38,11 +38,12 @@ import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
 
 
@@ -59,7 +60,7 @@ public class TestFileConcurrentReader {
 
 
   private static final Logger LOG =
-      Logger.getLogger(TestFileConcurrentReader.class);
+      LoggerFactory.getLogger(TestFileConcurrentReader.class);
 
   {
     GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.TRACE);
@@ -55,18 +55,18 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.slf4j.Logger;
+import org.slf4j.event.Level;
 
 /**
  * A JUnit test for corrupted file handling.
 */
 public class TestFileCorruption {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
   static Logger LOG = NameNode.stateChangeLog;
 
@@ -24,12 +24,12 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestFileCreationDelete {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   @Test
@@ -35,15 +35,15 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /** Class contains a set of tests to verify the correctness of
  * newly introduced {@link FSDataOutputStream#hflush()} method */
 public class TestHFlush {
   {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
  }
 
   private final String fName = "hflushtest.dat";
@@ -35,11 +35,11 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -71,7 +71,7 @@ public class TestLeaseRecoveryStriped {
   private static final int bytesPerChecksum = 512;
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
     GenericTestUtils.setLogLevel(DFSStripedOutputStream.LOG, Level.DEBUG);
     GenericTestUtils.setLogLevel(BlockRecoveryWorker.LOG, Level.DEBUG);
     GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.DEBUG);
@@ -25,16 +25,16 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Random;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.client.impl.BlockReaderTestUtil;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Driver class for testing the use of DFSInputStream by multiple concurrent
@@ -57,8 +57,9 @@ public class TestParallelReadUtil {
   static {
     // The client-trace log ends up causing a lot of blocking threads
    // in this when it's being used as a performance benchmark.
-    LogManager.getLogger(DataNode.class.getName() + ".clienttrace")
-        .setLevel(Level.WARN);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DataNode.class.getName() + ".clienttrace"),
+        Level.WARN);
   }
 
   private class TestFileInfo {
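Loggers addressed by name rather than by class take the same route; the DataNode client-trace logger above is simply looked up by its string name before being quieted to WARN for the benchmark:

    GenericTestUtils.setLogLevel(
        LoggerFactory.getLogger(DataNode.class.getName() + ".clienttrace"), Level.WARN);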
@@ -35,10 +35,10 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.Replica;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestPipelines {
   public static final Logger LOG = LoggerFactory.getLogger(TestPipelines.class);
@@ -158,8 +158,8 @@ public class TestPipelines {
   }
 
   private static void initLoggers() {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 }
@@ -50,7 +50,6 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
@@ -61,6 +60,7 @@ import org.mockito.stubbing.Answer;
 import java.util.function.Supplier;
 import org.slf4j.LoggerFactory;
 import org.slf4j.Logger;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the DFS positional read functionality in a single node
@@ -278,7 +278,7 @@ public class TestPread {
   @Test
   public void testPreadDFSNoChecksum() throws IOException {
     Configuration conf = new Configuration();
-    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.TRACE);
     dfsPreadTest(conf, false, false);
     dfsPreadTest(conf, true, false);
   }
@@ -66,11 +66,11 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.LambdaTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestReconstructStripedFile {
   public static final Logger LOG =
@@ -85,9 +85,9 @@ public class TestReconstructStripedFile {
   private int dnNum;
 
   static {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.TRACE);
   }
 
   enum ReconstructionType {
@@ -27,13 +27,13 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLog;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 public class TestRenameWhileOpen {
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   //TODO: un-comment checkFullFile once the lease recovery is done
@@ -37,9 +37,9 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure;
 import org.apache.hadoop.hdfs.protocol.datatransfer.ReplaceDatanodeOnFailure.Policy;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * This class tests that data nodes are correctly replaced on failure.
@@ -54,7 +54,7 @@ public class TestReplaceDatanodeOnFailure {
   final private static String RACK1 = "/rack1";
 
   {
-    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataTransferProtocol.LOG, Level.TRACE);
   }
 
   /** Test DEFAULT ReplaceDatanodeOnFailure policy. */
@@ -41,11 +41,12 @@ import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
 import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesEqual;
 import static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesNotEqual;
@@ -83,7 +84,8 @@ public class TestReservedRawPaths {
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri()
     );
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(cluster.getFileSystem());
     fcWrapper = new FileContextTestWrapper(
@@ -26,10 +26,10 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.OpenFilesIterator.OpenFilesType;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -50,7 +50,7 @@ public class TestStripedFileAppend {
   public static final Log LOG = LogFactory.getLog(TestStripedFileAppend.class);
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   private static final int NUM_DATA_BLOCKS =
@@ -34,11 +34,13 @@ import org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;
 import org.apache.hadoop.hdfs.client.HdfsAdmin;
 import org.apache.hadoop.hdfs.server.namenode.EncryptionZoneManager;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.File;
 import java.security.PrivilegedExceptionAction;
@@ -91,8 +93,8 @@ public class TestTrashWithEncryptionZones {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,
         2);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    org.apache.log4j.Logger
-        .getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     fsWrapper = new FileSystemTestWrapper(fs);
     dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);
@@ -25,10 +25,10 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -41,8 +41,8 @@ public class TestWriteStripedFileWithFailure {
   private Configuration conf = new HdfsConfiguration();
 
   static {
-    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSOutputStream.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DataStreamer.LOG, Level.TRACE);
   }
 
   private final ErasureCodingPolicy ecPolicy =
@@ -59,8 +59,9 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplica;
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * A helper class to setup the cluster, and get to BlockReader and DataNode for a block.
@@ -238,33 +239,27 @@ public class BlockReaderTestUtil {
   }
 
   public static void enableHdfsCachingTracing() {
-    LogManager.getLogger(CacheReplicationMonitor.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(CacheManager.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(FsDatasetCache.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(CacheReplicationMonitor.class);
+    enableTraceLog(CacheManager.class);
+    enableTraceLog(FsDatasetCache.class);
   }
 
   public static void enableBlockReaderFactoryTracing() {
-    LogManager.getLogger(BlockReaderFactory.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitCache.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitReplica.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(BlockReaderLocal.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(BlockReaderFactory.class);
+    enableTraceLog(ShortCircuitCache.class);
+    enableTraceLog(ShortCircuitReplica.class);
+    enableTraceLog(BlockReaderLocal.class);
   }
 
   public static void enableShortCircuitShmTracing() {
-    LogManager.getLogger(DfsClientShmManager.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitRegistry.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(ShortCircuitShm.class.getName()).setLevel(
-        Level.TRACE);
-    LogManager.getLogger(DataNode.class.getName()).setLevel(
-        Level.TRACE);
+    enableTraceLog(DfsClientShmManager.class);
+    enableTraceLog(ShortCircuitRegistry.class);
+    enableTraceLog(ShortCircuitShm.class);
+    enableTraceLog(DataNode.class);
+  }
+
+  private static void enableTraceLog(Class<?> clazz) {
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(clazz), Level.TRACE);
   }
 }
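BlockReaderTestUtil goes one step further and folds the repeated setLevel boilerplate into a small private helper, so each tracing method becomes a list of enableTraceLog(SomeClass.class) calls backed by:

    private static void enableTraceLog(Class<?> clazz) {
      GenericTestUtils.setLogLevel(LoggerFactory.getLogger(clazz), Level.TRACE);
    }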
@@ -29,10 +29,10 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.Status;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestClientBlockVerification {
 
@@ -42,7 +42,7 @@ public class TestClientBlockVerification {
   static LocatedBlock testBlock = null;
 
   static {
-    GenericTestUtils.setLogLevel(BlockReaderRemote.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockReaderRemote.LOG, Level.TRACE);
   }
   @BeforeClass
   public static void setupCluster() throws Exception {
@@ -53,7 +53,6 @@ import org.apache.hadoop.hdfs.util.Holder;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -64,6 +63,7 @@ import org.mockito.stubbing.Answer;
 import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Sets;
+import org.slf4j.event.Level;
 
 
 public class TestQJMWithFaults {
@@ -225,7 +225,7 @@ public class TestQJMWithFaults {
       // If the user specifies a seed, then we should gather all the
       // IPC trace information so that debugging is easier. This makes
       // the test run about 25% slower otherwise.
-      GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL);
+      GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.TRACE);
     } else {
       seed = new Random().nextLong();
     }
@@ -60,7 +60,6 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -70,6 +69,7 @@ import org.mockito.Mockito;
 import org.mockito.stubbing.Stubber;
 
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
+import org.slf4j.event.Level;
 
 /**
  * Functional tests for QuorumJournalManager.
@@ -87,7 +87,7 @@ public class TestQuorumJournalManager {
   private final List<QuorumJournalManager> toClose = Lists.newLinkedList();
 
   static {
-    GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.TRACE);
   }
 
   @Rule
@@ -87,7 +87,6 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
@@ -99,6 +98,7 @@ import org.apache.hadoop.thirdparty.protobuf.BlockingService;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 
 import org.apache.hadoop.fs.StorageType;
+import org.slf4j.event.Level;
 
 /** Unit tests for block tokens */
 public class TestBlockToken {
@@ -107,11 +107,11 @@ public class TestBlockToken {
   private static final String ADDRESS = "0.0.0.0";
 
   static {
-    GenericTestUtils.setLogLevel(Client.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(Server.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
   }
 
   /**
@@ -61,7 +61,7 @@ public class TestBalancerWithMultipleNameNodes {
   static final Logger LOG = Balancer.LOG;
   {
     GenericTestUtils.setLogLevel(LOG, Level.TRACE);
-    DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
 
@@ -35,13 +35,13 @@ import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
+import org.slf4j.event.Level;
 
 abstract public class BaseReplicationPolicyTest {
   {
-    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
   }
 
   protected NetworkTopology cluster;
@@ -31,11 +31,11 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.protocol.BlockReportContext;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.HashSet;
@@ -60,8 +60,8 @@ public class TestBlockReportRateLimiting {
 
   @BeforeClass
   public static void raiseBlockManagerLogLevels() {
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockReportLeaseManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockReportLeaseManager.LOG, Level.TRACE);
   }
 
   @Test(timeout=180000)
@@ -64,9 +64,9 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 public class TestBlockTokenWithDFS {
 
@@ -77,7 +77,7 @@ public class TestBlockTokenWithDFS {
   private static final String FILE_TO_APPEND = "/fileToAppend.dat";
 
   {
-    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DFSClient.LOG, Level.TRACE);
   }
 
   public static byte[] generateBytes(int fileSize){
@@ -45,9 +45,9 @@ import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
 import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -319,7 +319,7 @@ public class TestNameNodePrunesMissingStorages {
         .Builder(conf).numDataNodes(1)
         .storagesPerDatanode(1)
         .build();
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
     try {
       cluster.waitActive();
       final Path TEST_PATH = new Path("/foo1");
@@ -32,12 +32,12 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 
 import java.util.function.Supplier;
@@ -47,7 +47,7 @@ import java.util.function.Supplier;
  */
 public class TestPendingInvalidateBlock {
   {
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.DEBUG);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
   }
 
   private static final int BLOCKSIZE = 1024;
@@ -59,10 +59,10 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
-import org.apache.log4j.Level;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the internals of PendingReconstructionBlocks.java, as well
@@ -35,13 +35,13 @@ import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -53,9 +53,9 @@ public class TestReconstructStripedBlocksWithRackAwareness {
       TestReconstructStripedBlocksWithRackAwareness.class);
 
   static {
-    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockPlacementPolicy.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.blockLog, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockManager.LOG, Level.TRACE);
   }
 
   private final ErasureCodingPolicy ecPolicy =
@@ -69,13 +69,13 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
+import org.slf4j.event.Level;
 
 /**
  * This is the base class for simulating a variety of situations
@@ -877,9 +877,9 @@ public abstract class BlockReportTestBase {
   }
 
   private static void initLoggers() {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(BlockReportTestBase.LOG, org.slf4j.event.Level.DEBUG);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(BlockReportTestBase.LOG, Level.DEBUG);
   }
 
   private Block findBlock(Path path, long size) throws IOException {
@@ -86,7 +86,6 @@ import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.Rpc
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -97,6 +96,7 @@ import org.mockito.stubbing.Answer;
 import java.util.function.Supplier;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
+import org.slf4j.event.Level;
 
 public class TestBPOfferService {
 
@@ -114,7 +114,7 @@ public class TestBPOfferService {
   private long nextFullBlockReportLeaseId = 1L;
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   private DatanodeProtocolClientSideTranslatorPB mockNN1;
@@ -62,12 +62,12 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl;
 import org.apache.hadoop.hdfs.server.datanode.VolumeScanner.Statistics;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 public class TestBlockScanner {
   public static final Logger LOG =
@@ -76,9 +76,9 @@ public class TestBlockScanner {
   @Before
   public void before() {
     BlockScanner.Conf.allowUnitTestSettings = true;
-    GenericTestUtils.setLogLevel(BlockScanner.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(VolumeScanner.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(FsVolumeImpl.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(BlockScanner.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(VolumeScanner.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(FsVolumeImpl.LOG, Level.TRACE);
   }
 
   private static void disableBlockScanner(Configuration conf) {
@@ -899,7 +899,7 @@ public class TestBlockScanner {
    */
   @Test(timeout=120000)
   public void testAppendWhileScanning() throws Exception {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
     Configuration conf = new Configuration();
     // throttle the block scanner: 1MB per second
     conf.setLong(DFS_BLOCK_SCANNER_VOLUME_BYTES_PER_SECOND, 1048576);
@@ -57,8 +57,6 @@ import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.test.GenericTestUtils;
 
-import org.apache.log4j.Level;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -70,6 +68,7 @@ import org.mockito.stubbing.Answer;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.function.Supplier;
 
@@ -82,7 +81,7 @@ public class TestDataNodeLifeline {
       TestDataNodeLifeline.class);
 
   static {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   @Rule
@@ -75,7 +75,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.AutoCloseableLock;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.apache.log4j.SimpleLayout;
 import org.apache.log4j.WriterAppender;
 import org.junit.Before;
@@ -83,6 +82,7 @@ import org.junit.Test;
 import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * Tests {@link DirectoryScanner} handling of differences between blocks on the
@@ -410,7 +410,7 @@ public class TestDirectoryScanner {
     ByteArrayOutputStream loggerStream = new ByteArrayOutputStream();
     org.apache.log4j.Logger rootLogger =
         org.apache.log4j.Logger.getRootLogger();
-    rootLogger.setLevel(Level.INFO);
+    GenericTestUtils.setRootLogLevel(Level.INFO);
     WriterAppender writerAppender =
         new WriterAppender(new SimpleLayout(), loggerStream);
     rootLogger.addAppender(writerAppender);
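Only the level call changes in the hunk above; the test still attaches a log4j WriterAppender to the root logger to capture output. A sketch of the resulting combination, assuming the same imports as TestDirectoryScanner, is shown below for reference only.

// Sketch: capture root-logger output while the level is set through the
// slf4j-facing helper rather than log4j's Level API.
ByteArrayOutputStream loggerStream = new ByteArrayOutputStream();
org.apache.log4j.Logger rootLogger = org.apache.log4j.Logger.getRootLogger();
GenericTestUtils.setRootLogLevel(Level.INFO);   // org.slf4j.event.Level
rootLogger.addAppender(new WriterAppender(new SimpleLayout(), loggerStream));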
@@ -36,11 +36,11 @@ import org.apache.hadoop.hdfs.server.protocol.BlockReportContext;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
-import org.apache.log4j.Level;
 
 import org.junit.After;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Tests that very large block reports can pass through the RPC server and
@@ -38,9 +38,9 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_WRITE_BANDWIDTHPERSEC_KEY;
 
@@ -50,7 +50,7 @@ public class TestTransferRbw {
       LoggerFactory.getLogger(TestTransferRbw.class);
 
   {
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   private static final Random RAN = new Random();
@@ -46,7 +46,7 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi.FsVolumeRef
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.DataChecksum;
-import org.apache.log4j.Level;
+import org.slf4j.event.Level;
 
 import java.io.DataOutputStream;
 import java.io.File;
@@ -500,7 +500,6 @@ public class FsDatasetImplTestUtils implements FsDatasetTestUtils {
    * @param level the level to set
    */
   public static void setFsDatasetImplLogLevel(Level level) {
-    GenericTestUtils.setLogLevel(FsDatasetImpl.LOG,
-        org.slf4j.event.Level.valueOf(level.toString()));
+    GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, level);
   }
 }
@@ -45,13 +45,13 @@ import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Daemon;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.io.OutputStream;
@@ -101,8 +101,8 @@ public class TestSpaceReservation {
   }
 
   static {
-    GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FsDatasetImpl.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(DataNode.LOG, Level.TRACE);
   }
 
   /**
@@ -23,13 +23,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.metrics2.lib.MetricsTestHelper;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.Map;
 import java.util.Random;
@@ -67,8 +67,8 @@ public class TestDataNodeOutlierDetectionViaMetrics {
 
   @Before
   public void setup() {
-    GenericTestUtils.setLogLevel(DataNodePeerMetrics.LOG, Level.ALL);
-    GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(DataNodePeerMetrics.LOG, Level.TRACE);
+    GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.TRACE);
     conf = new HdfsConfiguration();
   }
 
@@ -23,13 +23,13 @@ import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -239,7 +239,7 @@ public class TestSlowNodeDetector {
   public void setup() {
     slowNodeDetector = new OutlierDetector(MIN_OUTLIER_DETECTION_PEERS,
         (long) LOW_THRESHOLD);
-    GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(OutlierDetector.LOG, Level.TRACE);
   }
 
   @Test
@@ -85,8 +85,7 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.VersionInfo;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
+import org.slf4j.event.Level;
 
 /**
  * Main class for a series of name-node benchmarks.
@@ -150,9 +149,9 @@ public class NNThroughputBenchmark implements Tool {
     LOG.info("Log level = " + logLevel.toString());
     // change log level to NameNode logs
     DFSTestUtil.setNameNodeLogLevel(logLevel);
-    GenericTestUtils.setLogLevel(LogManager.getLogger(
-        NetworkTopology.class.getName()), logLevel);
-    GenericTestUtils.setLogLevel(LogManager.getLogger(
-        Groups.class.getName()), logLevel);
+    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(
+        NetworkTopology.class.getName()), logLevel);
+    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(
+        Groups.class.getName()), logLevel);
   }
 
@@ -353,7 +352,7 @@ public class NNThroughputBenchmark implements Tool {
       if(llIndex >= 0) {
         if(args.size() <= llIndex + 1)
           printUsage();
-        logLevel = Level.toLevel(args.get(llIndex+1), Level.ERROR);
+        logLevel = Level.valueOf(args.get(llIndex+1));
         args.remove(llIndex+1);
         args.remove(llIndex);
       }
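Note the behavioural difference in the -logLevel argument parsing above: log4j's Level.toLevel(name, Level.ERROR) silently fell back to ERROR for an unrecognised name, while slf4j's Level.valueOf(name) is a plain enum lookup that expects an upper-case constant and throws IllegalArgumentException otherwise. A hedged sketch of a parser that would keep the old fallback behaviour, should it ever be wanted; the helper name is hypothetical and not part of the patch.

// Sketch only: tolerant level parsing on top of the slf4j enum,
// mimicking log4j's "default to ERROR on unknown input" behaviour.
import java.util.Locale;
import org.slf4j.event.Level;

final class LevelParseSketch {
  static Level parseOrError(String name) {
    try {
      return Level.valueOf(name.trim().toUpperCase(Locale.ROOT));
    } catch (IllegalArgumentException | NullPointerException e) {
      return Level.ERROR;   // fallback matching the previous log4j default
    }
  }
}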
@@ -26,10 +26,10 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.FSNamesystemAuditLogger;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;
+import org.slf4j.event.Level;
 
 import java.net.Inet4Address;
 import java.util.Arrays;
@@ -39,7 +39,6 @@ import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.LogCapturer;
-import org.apache.log4j.Level;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -48,6 +47,7 @@ import org.mockito.Mockito;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.IOException;
 import java.net.HttpURLConnection;
@@ -85,7 +85,7 @@ public class TestAuditLogger {
   private static final Logger LOG = LoggerFactory.getLogger(
       TestAuditLogger.class);
   static {
-    GenericTestUtils.setLogLevel(LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(LOG, Level.TRACE);
   }
 
   private static final short TEST_PERMISSION = (short) 0654;
|
@ -47,6 +47,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
|
||||||
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
|
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
|
||||||
import org.apache.hadoop.security.AccessControlException;
|
import org.apache.hadoop.security.AccessControlException;
|
||||||
import org.apache.hadoop.security.UserGroupInformation;
|
import org.apache.hadoop.security.UserGroupInformation;
|
||||||
|
import org.apache.hadoop.test.GenericTestUtils;
|
||||||
import org.apache.hadoop.test.PathUtils;
|
import org.apache.hadoop.test.PathUtils;
|
||||||
import org.apache.log4j.Appender;
|
import org.apache.log4j.Appender;
|
||||||
import org.apache.log4j.AsyncAppender;
|
import org.apache.log4j.AsyncAppender;
|
||||||
|
@ -61,6 +62,7 @@ import org.junit.Test;
|
||||||
import org.junit.runner.RunWith;
|
import org.junit.runner.RunWith;
|
||||||
import org.junit.runners.Parameterized;
|
import org.junit.runners.Parameterized;
|
||||||
import org.junit.runners.Parameterized.Parameters;
|
import org.junit.runners.Parameterized.Parameters;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A JUnit test that audit logs are generated
|
* A JUnit test that audit logs are generated
|
||||||
|
@ -297,11 +299,11 @@ public class TestAuditLogs {
|
||||||
if (file.exists()) {
|
if (file.exists()) {
|
||||||
assertTrue(file.delete());
|
assertTrue(file.delete());
|
||||||
}
|
}
|
||||||
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
|
||||||
// disable logging while the cluster startup preps files
|
// disable logging while the cluster startup preps files
|
||||||
logger.setLevel(Level.OFF);
|
disableAuditLog();
|
||||||
PatternLayout layout = new PatternLayout("%m%n");
|
PatternLayout layout = new PatternLayout("%m%n");
|
||||||
RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
|
RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
|
||||||
|
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
||||||
logger.addAppender(appender);
|
logger.addAppender(appender);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -314,10 +316,10 @@ public class TestAuditLogs {
|
||||||
private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
|
private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
// Turn off the logs
|
// Turn off the logs
|
||||||
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
disableAuditLog();
|
||||||
logger.setLevel(Level.OFF);
|
|
||||||
|
|
||||||
// Close the appenders and force all logs to be flushed
|
// Close the appenders and force all logs to be flushed
|
||||||
|
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
||||||
Enumeration<?> appenders = logger.getAllAppenders();
|
Enumeration<?> appenders = logger.getAllAppenders();
|
||||||
while (appenders.hasMoreElements()) {
|
while (appenders.hasMoreElements()) {
|
||||||
Appender appender = (Appender)appenders.nextElement();
|
Appender appender = (Appender)appenders.nextElement();
|
||||||
|
@ -347,10 +349,10 @@ public class TestAuditLogs {
|
||||||
private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Pattern pattern)
|
private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Pattern pattern)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
// Turn off the logs
|
// Turn off the logs
|
||||||
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
disableAuditLog();
|
||||||
logger.setLevel(Level.OFF);
|
|
||||||
|
|
||||||
// Close the appenders and force all logs to be flushed
|
// Close the appenders and force all logs to be flushed
|
||||||
|
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
|
||||||
Enumeration<?> appenders = logger.getAllAppenders();
|
Enumeration<?> appenders = logger.getAllAppenders();
|
||||||
while (appenders.hasMoreElements()) {
|
while (appenders.hasMoreElements()) {
|
||||||
Appender appender = (Appender)appenders.nextElement();
|
Appender appender = (Appender)appenders.nextElement();
|
||||||
|
@ -376,4 +378,10 @@ public class TestAuditLogs {
|
||||||
reader.close();
|
reader.close();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void disableAuditLog() {
|
||||||
|
GenericTestUtils.disableLog(LoggerFactory.getLogger(
|
||||||
|
FSNamesystem.class.getName() + ".audit"));
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
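In the hunks above, the audit logger is obtained by name (FSNamesystem.class.getName() + ".audit") and silenced through GenericTestUtils.disableLog instead of forcing the log4j level to OFF. A small sketch of that helper in isolation, mirroring the new disableAuditLog() method; the wrapper class name is hypothetical.

// Sketch: silencing the HDFS audit logger via the slf4j-facing test helper,
// equivalent in effect to the old log4j Level.OFF call.
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.LoggerFactory;

final class AuditLogSilencerSketch {            // hypothetical class name
  static void disableAuditLog() {
    GenericTestUtils.disableLog(LoggerFactory.getLogger(
        FSNamesystem.class.getName() + ".audit"));
  }
}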
@@ -57,11 +57,12 @@ import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.hdfs.util.HostsFileWriter;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 /**
  * This class tests the decommissioning of nodes.
@@ -107,8 +108,9 @@ public class TestDecommissioningStatus {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_REDUNDANCY_INTERVAL_SECONDS_KEY, 1);
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY, 1);
     conf.setLong(DFSConfigKeys.DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY, 1);
-    Logger.getLogger(DatanodeAdminManager.class).setLevel(Level.DEBUG);
-    LOG = Logger.getLogger(TestDecommissioningStatus.class);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(DatanodeAdminManager.class), Level.DEBUG);
+    LOG = LoggerFactory.getLogger(TestDecommissioningStatus.class);
     return conf;
   }
 
@@ -388,8 +390,8 @@ public class TestDecommissioningStatus {
    */
  @Test(timeout=120000)
  public void testDecommissionDeadDN() throws Exception {
-    Logger log = Logger.getLogger(DatanodeAdminManager.class);
-    log.setLevel(Level.DEBUG);
+    Logger log = LoggerFactory.getLogger(DatanodeAdminManager.class);
+    GenericTestUtils.setLogLevel(log, Level.DEBUG);
     DatanodeID dnID = cluster.getDataNodes().get(0).getDatanodeId();
     String dnName = dnID.getXferAddr();
     DataNodeProperties stoppedDN = cluster.stopDataNode(0);
@@ -91,7 +91,6 @@ import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.ExitUtil.ExitException;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.apache.log4j.Level;
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.spi.LoggingEvent;
@@ -100,6 +99,7 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.mockito.Mockito;
+import org.slf4j.event.Level;
 import org.xml.sax.ContentHandler;
 import org.xml.sax.SAXException;
 
@@ -113,7 +113,7 @@ import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 public class TestEditLog {
 
   static {
-    GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.ALL);
+    GenericTestUtils.setLogLevel(FSEditLog.LOG, Level.TRACE);
   }
 
   @Parameters
@@ -20,11 +20,12 @@ package org.apache.hadoop.hdfs.server.namenode;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HAUtil;
-import org.apache.log4j.Level;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 public class TestFsImageValidation {
   static final Logger LOG = LoggerFactory.getLogger(
@@ -32,9 +33,11 @@ public class TestFsImageValidation {
 
   static {
     final Level t = Level.TRACE;
-    FsImageValidation.Util.setLogLevel(FsImageValidation.class, t);
-    FsImageValidation.Util.setLogLevel(INodeReferenceValidation.class, t);
-    FsImageValidation.Util.setLogLevel(INode.class, t);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(FsImageValidation.class), t);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(INodeReferenceValidation.class), t);
+    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(INode.class), t);
   }
 
   /**
@@ -264,8 +264,8 @@ public class TestFsck {
 
   private void verifyAuditLogs() throws IOException {
     // Turn off the logs
-    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
-    logger.setLevel(Level.OFF);
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(
+        FSNamesystem.class.getName() + ".audit"));
 
     BufferedReader reader = null;
     try {
@@ -292,6 +292,7 @@ public class TestFsck {
       if (reader != null) {
         reader.close();
       }
+      Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
       if (logger != null) {
         logger.removeAllAppenders();
       }
@@ -36,9 +36,9 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.server.balancer.TestBalancer;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Test fsck with multiple NameNodes
@@ -47,7 +47,7 @@ public class TestFsckWithMultipleNameNodes {
   static final Logger LOG =
       LoggerFactory.getLogger(TestFsckWithMultipleNameNodes.class);
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
 
@@ -32,12 +32,13 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.LoggerFactory;
+import org.slf4j.event.Level;
 
 import java.io.File;
 
@@ -103,7 +104,8 @@ public class TestNestedEncryptionZones {
     // enable trash for testing
     conf.setLong(DFSConfigKeys.FS_TRASH_INTERVAL_KEY, 1);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
-    Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);
+    GenericTestUtils.setLogLevel(
+        LoggerFactory.getLogger(EncryptionZoneManager.class), Level.TRACE);
     fs = cluster.getFileSystem();
     setProvider();
 
@@ -58,12 +58,12 @@ import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
+import org.slf4j.event.Level;
 
 
 public class TestDNFencing {
 
@@ -79,7 +79,7 @@ public class TestDNFencing {
   private FileSystem fs;
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   @Before
@@ -32,9 +32,9 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
 import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.util.function.Supplier;
 
@@ -46,8 +46,8 @@ import java.util.function.Supplier;
 public class TestDNFencingWithReplication {
   static {
     GenericTestUtils.setLogLevel(FSNamesystem.auditLog, Level.WARN);
-    GenericTestUtils.setLogLevel(Server.LOG, Level.FATAL);
-    GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.FATAL);
+    GenericTestUtils.setLogLevel(Server.LOG, Level.ERROR);
+    GenericTestUtils.setLogLevel(RetryInvocationHandler.LOG, Level.ERROR);
   }
 
   private static final int NUM_THREADS = 20;
@@ -88,7 +88,7 @@ public class TestHASafeMode {
   private MiniDFSCluster cluster;
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
     GenericTestUtils.setLogLevel(FSImage.LOG, Level.TRACE);
   }
 
@ -58,9 +58,9 @@ import org.apache.hadoop.test.MultithreadedTestUtil.RepeatingTestThread;
|
||||||
import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
|
import org.apache.hadoop.test.MultithreadedTestUtil.TestContext;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
|
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
|
||||||
import org.apache.log4j.Level;
|
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
import org.mockito.Mockito;
|
import org.mockito.Mockito;
|
||||||
|
import org.slf4j.event.Level;
|
||||||
|
|
||||||
import java.util.function.Supplier;
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
|
@@ -69,9 +69,9 @@ import java.util.function.Supplier;
  */
 public class TestPipelinesFailover {
   static {
-    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(RetryInvocationHandler
-        .class), org.slf4j.event.Level.DEBUG);
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    GenericTestUtils.setLogLevel(LoggerFactory.getLogger(
+        RetryInvocationHandler.class), Level.DEBUG);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   protected static final Logger LOG = LoggerFactory.getLogger(
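
Taken together, the substitutions in these hunks follow one nearest-severity rule. The helper below is not part of the change; it is a hypothetical sketch that writes down the mapping the patch applies by hand, with ALL collapsing to TRACE and FATAL to ERROR:

import org.slf4j.event.Level;

public final class Log4jToSlf4j {
  private Log4jToSlf4j() {}

  /** Closest org.slf4j.event.Level for a log4j level, mirroring the edits in this patch. */
  public static Level nearest(org.apache.log4j.Level log4j) {
    if (org.apache.log4j.Level.ALL.equals(log4j)
        || org.apache.log4j.Level.TRACE.equals(log4j)) {
      return Level.TRACE;   // slf4j has no ALL; TRACE is its finest level
    } else if (org.apache.log4j.Level.DEBUG.equals(log4j)) {
      return Level.DEBUG;
    } else if (org.apache.log4j.Level.INFO.equals(log4j)) {
      return Level.INFO;
    } else if (org.apache.log4j.Level.WARN.equals(log4j)) {
      return Level.WARN;
    } else {
      return Level.ERROR;   // ERROR and FATAL both land here; slf4j has no FATAL
    }
  }
}
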
@@ -29,8 +29,8 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
-import org.apache.log4j.Level;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import static org.junit.Assert.assertEquals;
 
@@ -46,7 +46,7 @@ public class TestStandbyBlockManagement {
   private static final Path TEST_FILE_PATH = new Path(TEST_FILE);
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   @Test(timeout=60000)
@@ -39,9 +39,9 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 import java.util.function.Supplier;
 
@@ -57,7 +57,7 @@ public class TestStandbyIsHot {
   private static final Path TEST_FILE_PATH = new Path(TEST_FILE);
 
   static {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   @Test(timeout=60000)
@@ -28,11 +28,11 @@ import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.event.Level;
 
 /**
  * Test WebHDFS files/directories creation to make sure it follows same rules
@@ -42,7 +42,7 @@ public class TestWebHdfsCreatePermissions {
   static final Logger LOG =
       LoggerFactory.getLogger(TestWebHdfsCreatePermissions.class);
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   private MiniDFSCluster cluster;
@@ -45,11 +45,11 @@ import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.PostOpParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
-import org.apache.log4j.Level;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
+import org.slf4j.event.Level;
 
 /**
  * Test WebHDFS which provides data locality using HTTP redirection.
@@ -58,7 +58,7 @@ public class TestWebHdfsDataLocality {
   static final Logger LOG =
       LoggerFactory.getLogger(TestWebHdfsDataLocality.class);
   {
-    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   private static final String RACK0 = "/rack0";
@@ -48,7 +48,7 @@ public class TestWebHdfsWithMultipleNameNodes {
     GenericTestUtils.setLogLevel(LOG, Level.TRACE);
     GenericTestUtils.setLogLevel(NamenodeWebHdfsMethods.LOG, Level.TRACE);
 
-    DFSTestUtil.setNameNodeLogLevel(org.apache.log4j.Level.TRACE);
+    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
   }
 
   private static final Configuration conf = new HdfsConfiguration();
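
After the last hunk, every touched test initialises logging the same way: org.slf4j.event.Level is imported, slf4j loggers are handed to GenericTestUtils.setLogLevel, and DFSTestUtil.setNameNodeLogLevel receives the slf4j level directly (as the hunks above indicate it does after this change). A minimal sketch of that end state, with an invented class name and assuming the hadoop-common and hadoop-hdfs test artifacts are on the classpath:

import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.io.retry.RetryInvocationHandler;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;

// Hypothetical test class; the static block mirrors the post-patch initializers above.
public class ExampleHaTest {
  static {
    GenericTestUtils.setLogLevel(
        LoggerFactory.getLogger(RetryInvocationHandler.class), Level.DEBUG);
    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
  }
}
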