HDFS-5348. Fix error message when dfs.datanode.max.locked.memory is improperly configured. (Contributed by Colin Patrick McCabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-4949@1531460 13f79535-47bb-0310-9956-ffa450edef68
Author: Andrew Wang
Date:   2013-10-11 23:13:13 +00:00
Parent: 8111c3af6b
Commit: 09e9e57a0b

3 changed files with 29 additions and 18 deletions

@@ -83,3 +83,6 @@ HDFS-4949 (Unreleased)
 
     HDFS-5314. Do not expose CachePool type in AddCachePoolOp (Colin Patrick
     McCabe)
+
+    HDFS-5348. Fix error message when dfs.datanode.max.locked.memory is
+    improperly configured. (Colin Patrick McCabe)

@@ -753,7 +753,7 @@ void startDataNode(Configuration conf,
     if (dnConf.maxLockedMemory > ulimit) {
       throw new RuntimeException(String.format(
           "Cannot start datanode because the configured max locked memory" +
-          " size (%s) of %d bytes is less than the datanode's available" +
+          " size (%s) of %d bytes is more than the datanode's available" +
           " RLIMIT_MEMLOCK ulimit of %d bytes.",
           DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
           dnConf.maxLockedMemory,
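
Rendered, the corrected message reads as below. This is a quick illustrative
sketch, not code from the patch: the configured size and the ulimit are
made-up example values, and the key name is the one from the commit title.

    String msg = String.format(
        "Cannot start datanode because the configured max locked memory" +
        " size (%s) of %d bytes is more than the datanode's available" +
        " RLIMIT_MEMLOCK ulimit of %d bytes.",
        "dfs.datanode.max.locked.memory",  // DFS_DATANODE_MAX_LOCKED_MEMORY_KEY
        128L * 1024 * 1024,                // example dnConf.maxLockedMemory
        64L * 1024);                       // example RLIMIT_MEMLOCK ulimit
    // msg => "Cannot start datanode because the configured max locked memory
    // size (dfs.datanode.max.locked.memory) of 134217728 bytes is more than
    // the datanode's available RLIMIT_MEMLOCK ulimit of 65536 bytes."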

@@ -114,25 +114,33 @@ private static String makeURI(String scheme, String host, String path)
   public void testMemlockLimit() throws Exception {
     assumeTrue(NativeIO.isAvailable());
     final long memlockLimit = NativeIO.getMemlockLimit();
+    // Can't increase the memlock limit past the maximum.
+    assumeTrue(memlockLimit != Long.MAX_VALUE);
     Configuration conf = cluster.getConfiguration(0);
-    // Try starting the DN with limit configured to the ulimit
-    conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
-        memlockLimit);
-    if (memlockLimit == Long.MAX_VALUE) {
-      // Can't increase the memlock limit past the maximum.
-      return;
-    }
-    DataNode dn = null;
-    dn = DataNode.createDataNode(new String[]{}, conf);
-    dn.shutdown();
-    // Try starting the DN with a limit > ulimit
-    conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
-        memlockLimit+1);
+    long prevLimit = conf.
+        getLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+            DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT);
     try {
-      dn = DataNode.createDataNode(new String[]{}, conf);
-    } catch (RuntimeException e) {
-      GenericTestUtils.assertExceptionContains(
-          "less than the datanode's available RLIMIT_MEMLOCK", e);
+      // Try starting the DN with limit configured to the ulimit
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          memlockLimit);
+      DataNode dn = null;
+      dn = DataNode.createDataNode(new String[]{}, conf);
+      dn.shutdown();
+      // Try starting the DN with a limit > ulimit
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          memlockLimit+1);
+      try {
+        dn = DataNode.createDataNode(new String[]{}, conf);
+      } catch (RuntimeException e) {
+        GenericTestUtils.assertExceptionContains(
+            "more than the datanode's available RLIMIT_MEMLOCK", e);
+      }
+    } finally {
+      conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
+          prevLimit);
     }
   }
 }
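
The test now follows a save/mutate/restore idiom so the shared cluster
Configuration is left as it was found even if the test fails. A minimal
standalone sketch of the pattern, using a hypothetical key rather than the
test's real one:

    import org.apache.hadoop.conf.Configuration;

    public class ConfRestoreExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        final String key = "example.locked.memory";  // hypothetical key
        long prev = conf.getLong(key, 0L);           // save the current value
        try {
          conf.setLong(key, 12345L);                 // mutate for the test
          // ... exercise code that reads the key ...
        } finally {
          conf.setLong(key, prev);                   // always restore
        }
      }
    }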