HDFS-5348. Fix error message when dfs.datanode.max.locked.memory is improperly configured. (Contributed by Colin Patrick McCabe)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1531460 13f79535-47bb-0310-9956-ffa450edef68
commit 09e9e57a0b
parent 8111c3af6b
@@ -83,3 +83,6 @@ HDFS-4949 (Unreleased)
 
     HDFS-5314. Do not expose CachePool type in AddCachePoolOp (Colin Patrick
     McCabe)
+
+    HDFS-5348. Fix error message when dfs.datanode.max.locked.memory is
+    improperly configured. (Colin Patrick McCabe)
@@ -753,7 +753,7 @@ public class DataNode extends Configured
     if (dnConf.maxLockedMemory > ulimit) {
       throw new RuntimeException(String.format(
           "Cannot start datanode because the configured max locked memory" +
-          " size (%s) of %d bytes is less than the datanode's available" +
+          " size (%s) of %d bytes is more than the datanode's available" +
           " RLIMIT_MEMLOCK ulimit of %d bytes.",
           DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
           dnConf.maxLockedMemory,
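The hunk above is the whole fix: the old message said the configured size was "less than" the ulimit on the code path that fires precisely when it is greater. A minimal standalone sketch of the corrected check follows; the sample byte values are illustrative only (the real code reads them from DNConf and the OS ulimit), and MemlockMessageSketch is a hypothetical wrapper, not a class from the commit.

public class MemlockMessageSketch {
  // Key name as used in the hunk above.
  static final String DFS_DATANODE_MAX_LOCKED_MEMORY_KEY =
      "dfs.datanode.max.locked.memory";

  public static void main(String[] args) {
    long maxLockedMemory = 64L * 1024 * 1024; // configured limit (sample value)
    long ulimit = 16L * 1024 * 1024;          // RLIMIT_MEMLOCK (sample value)
    if (maxLockedMemory > ulimit) {
      // Same format string as the fixed code: the configured size is
      // *more* than the available ulimit, not less.
      throw new RuntimeException(String.format(
          "Cannot start datanode because the configured max locked memory" +
          " size (%s) of %d bytes is more than the datanode's available" +
          " RLIMIT_MEMLOCK ulimit of %d bytes.",
          DFS_DATANODE_MAX_LOCKED_MEMORY_KEY, maxLockedMemory, ulimit));
    }
  }
}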
@@ -114,25 +114,33 @@ public class TestDatanodeConfig {
   public void testMemlockLimit() throws Exception {
     assumeTrue(NativeIO.isAvailable());
     final long memlockLimit = NativeIO.getMemlockLimit();
+
+    // Can't increase the memlock limit past the maximum.
+    assumeTrue(memlockLimit != Long.MAX_VALUE);
+
     Configuration conf = cluster.getConfiguration(0);
-    // Try starting the DN with limit configured to the ulimit
-    conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
-        memlockLimit);
-    if (memlockLimit == Long.MAX_VALUE) {
-      // Can't increase the memlock limit past the maximum.
-      return;
-    }
-    DataNode dn = null;
-    dn = DataNode.createDataNode(new String[]{}, conf);
-    dn.shutdown();
-    // Try starting the DN with a limit > ulimit
-    conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
-        memlockLimit+1);
+    long prevLimit = conf.
+        getLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+            DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT);
     try {
-      dn = DataNode.createDataNode(new String[]{}, conf);
-    } catch (RuntimeException e) {
-      GenericTestUtils.assertExceptionContains(
-          "less than the datanode's available RLIMIT_MEMLOCK", e);
+      // Try starting the DN with limit configured to the ulimit
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          memlockLimit);
+      DataNode dn = null;
+      dn = DataNode.createDataNode(new String[]{}, conf);
+      dn.shutdown();
+      // Try starting the DN with a limit > ulimit
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          memlockLimit+1);
+      try {
+        dn = DataNode.createDataNode(new String[]{}, conf);
+      } catch (RuntimeException e) {
+        GenericTestUtils.assertExceptionContains(
+            "more than the datanode's available RLIMIT_MEMLOCK", e);
+      }
+    } finally {
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          prevLimit);
     }
   }
 }
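The test rework does more than flip the asserted substring to "more than": it skips via assumeTrue when RLIMIT_MEMLOCK is unlimited instead of returning a silent pass, and it restores the previous limit in a finally block so a failed startup cannot leak configuration into later tests. A minimal sketch of that save/mutate/restore pattern follows, using a hypothetical map-backed Configuration stand-in rather than Hadoop's real org.apache.hadoop.conf.Configuration.

import java.util.HashMap;
import java.util.Map;

public class RestoreConfigSketch {
  // Hypothetical stand-in for Hadoop's Configuration, just enough to
  // show the getLong/setLong save-and-restore shape used by the test.
  static class Configuration {
    private final Map<String, Long> values = new HashMap<>();
    long getLong(String key, long defaultValue) {
      return values.getOrDefault(key, defaultValue);
    }
    void setLong(String key, long value) {
      values.put(key, value);
    }
  }

  public static void main(String[] args) {
    final String key = "dfs.datanode.max.locked.memory";
    Configuration conf = new Configuration();
    long prevLimit = conf.getLong(key, 0L); // remember the current value
    try {
      conf.setLong(key, prevLimit + 1);     // mutate it for the test
      // ... exercise the code under test here ...
    } finally {
      conf.setLong(key, prevLimit);         // always restore, even on failure
    }
  }
}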