diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f64a390b4fc..fea9b8e8973 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -338,6 +338,8 @@ Trunk (Unreleased)
 
     HDFS-4603. TestMiniDFSCluster fails on Windows. (Ivan Mitic via suresh)
 
+    HDFS-4604. TestJournalNode fails on Windows. (Ivan Mitic via suresh)
+
 Release 2.0.5-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
index 91629d930c9..fe08f209f4a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNode.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
 import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.util.DiskChecker;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -82,7 +83,6 @@ public class JournalNode implements Tool, Configurable {
     return journal;
   }
 
-
   @Override
   public void setConf(Configuration conf) {
     this.conf = conf;
@@ -97,21 +97,9 @@ public class JournalNode implements Tool, Configurable {
           "Journal dir '" + dir + "' should be an absolute path");
     }
 
-    if (!dir.exists() && !dir.mkdirs()) {
-      throw new IOException("Could not create journal dir '" +
-          dir + "'");
-    } else if (!dir.isDirectory()) {
-      throw new IOException("Journal directory '" + dir + "' is not " +
-          "a directory");
-    }
-
-    if (!dir.canWrite()) {
-      throw new IOException("Unable to write to journal dir '" +
-          dir + "'");
-    }
+    DiskChecker.checkDir(dir);
   }
 
-
   @Override
   public Configuration getConf() {
     return conf;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
index f57f7e66b40..e6e140443bf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.MetricsAsserts;
+import org.apache.hadoop.util.Shell;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -65,6 +66,8 @@ public class TestJournalNode {
   private Configuration conf = new Configuration();
   private IPCLoggerChannel ch;
   private String journalId;
+  private File TEST_BUILD_DATA =
+      new File(System.getProperty("test.build.data", "build/test/data"));
 
   static {
     // Avoid an error when we double-initialize JvmMetrics
@@ -96,7 +99,7 @@ public class TestJournalNode {
     jn.stop(0);
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testJournal() throws Exception {
     MetricsRecordBuilder metrics = MetricsAsserts.getMetrics(
         journal.getMetricsForTests().getName());
@@ -129,7 +132,7 @@ public class TestJournalNode {
 
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testReturnsSegmentInfoAtEpochTransition() throws Exception {
     ch.newEpoch(1).get();
     ch.setEpoch(1);
@@ -157,7 +160,7 @@ public class TestJournalNode {
     assertEquals(1, response.getLastSegmentTxId());
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testHttpServer() throws Exception {
     InetSocketAddress addr = jn.getBoundHttpAddress();
     assertTrue(addr.getPort() > 0);
@@ -210,7 +213,7 @@ public class TestJournalNode {
    * Test that the JournalNode performs correctly as a Paxos
    * Acceptor process.
    */
-  @Test
+  @Test(timeout=100000)
   public void testAcceptRecoveryBehavior() throws Exception {
     // We need to run newEpoch() first, or else we have no way to distinguish
     // different proposals for the same decision.
@@ -270,20 +273,27 @@ public class TestJournalNode {
     }
   }
 
-  @Test
+  @Test(timeout=100000)
   public void testFailToStartWithBadConfig() throws Exception {
     Configuration conf = new Configuration();
     conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY, "non-absolute-path");
     assertJNFailsToStart(conf, "should be an absolute path");
 
     // Existing file which is not a directory
-    conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY, "/dev/null");
-    assertJNFailsToStart(conf, "is not a directory");
+    File existingFile = new File(TEST_BUILD_DATA, "testjournalnodefile");
+    assertTrue(existingFile.createNewFile());
+    try {
+      conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY,
+          existingFile.getAbsolutePath());
+      assertJNFailsToStart(conf, "Not a directory");
+    } finally {
+      existingFile.delete();
+    }
 
     // Directory which cannot be created
-    conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY, "/proc/does-not-exist");
-    assertJNFailsToStart(conf, "Could not create");
-
+    conf.set(DFSConfigKeys.DFS_JOURNALNODE_EDITS_DIR_KEY,
+        Shell.WINDOWS ? "\\\\cannotBeCreated" : "/proc/does-not-exist");
+    assertJNFailsToStart(conf, "Can not create directory");
   }
 
   private static void assertJNFailsToStart(Configuration conf,