HDFS-6234. TestDatanodeConfig#testMemlockLimit fails on Windows due to invalid file path. Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1586682 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Chris Nauroth 2014-04-11 15:30:18 +00:00
parent 91ebf58904
commit 76ea55caeb
2 changed files with 15 additions and 3 deletions

View File

@ -375,6 +375,9 @@ Release 2.4.1 - UNRELEASED
HDFS-6231. DFSClient hangs infinitely if using hedged reads and all eligible
datanodes die. (cnauroth)
HDFS-6234. TestDatanodeConfig#testMemlockLimit fails on Windows due to
invalid file path. (cnauroth)
Release 2.4.0 - 2014-04-07
INCOMPATIBLE CHANGES

View File

@ -86,9 +86,11 @@ public class TestDatanodeConfig {
fail();
} catch(Exception e) {
// expecting exception here
} finally {
if (dn != null) {
dn.shutdown();
}
}
if(dn != null)
dn.shutdown();
assertNull("Data-node startup should have failed.", dn);
// 2. Test "file:" schema and no schema (path-only). Both should work.
@ -121,17 +123,21 @@ public class TestDatanodeConfig {
// Can't increase the memlock limit past the maximum.
assumeTrue(memlockLimit != Long.MAX_VALUE);
File dataDir = new File(BASE_DIR, "data").getCanonicalFile();
Configuration conf = cluster.getConfiguration(0);
conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
makeURI("file", null, fileAsURI(dataDir).getPath()));
long prevLimit = conf.
getLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_DEFAULT);
DataNode dn = null;
try {
// Try starting the DN with limit configured to the ulimit
conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
memlockLimit);
DataNode dn = null;
dn = DataNode.createDataNode(new String[]{}, conf);
dn.shutdown();
dn = null;
// Try starting the DN with a limit > ulimit
conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
memlockLimit+1);
@ -142,6 +148,9 @@ public class TestDatanodeConfig {
"more than the datanode's available RLIMIT_MEMLOCK", e);
}
} finally {
if (dn != null) {
dn.shutdown();
}
conf.setLong(DFSConfigKeys.DFS_DATANODE_MAX_LOCKED_MEMORY_KEY,
prevLimit);
}