diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3cc339a596e..c034b5d0411 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -303,6 +303,9 @@ Release 0.23.3 - UNRELEASED
 
     HDFS-3005. FSVolume.decDfsUsed(..) should be synchronized. (szetszwo)
 
+    HDFS-3099. SecondaryNameNode does not properly initialize metrics system.
+    (atm)
+
 BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index 5c5ddc87149..2f525ad6b08 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -204,6 +204,7 @@ public class SecondaryNameNode implements Runnable {
           DFS_SECONDARY_NAMENODE_USER_NAME_KEY, infoBindAddress);
     }
     // initiate Java VM metrics
+    DefaultMetricsSystem.initialize("SecondaryNameNode");
     JvmMetrics.create("SecondaryNameNode",
         conf.get(DFS_METRICS_SESSION_ID_KEY), DefaultMetricsSystem.instance());
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
index 16b2ac6c65e..b234d6127aa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
@@ -17,42 +17,59 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.net.MalformedURLException;
 import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestSecondaryWebUi {
+
+  private static MiniDFSCluster cluster;
+  private static SecondaryNameNode snn;
+  private static Configuration conf = new Configuration();
+
+  @BeforeClass
+  public static void setUpCluster() throws IOException {
+    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
+        "0.0.0.0:0");
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
+        .build();
+    cluster.waitActive();
+
+    snn = new SecondaryNameNode(conf);
+  }
+
+  @AfterClass
+  public static void shutDownCluster() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+    if (snn != null) {
+      snn.shutdown();
+    }
+  }
 
   @Test
   public void testSecondaryWebUi() throws IOException {
-    Configuration conf = new Configuration();
-    conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
-        "0.0.0.0:0");
-    MiniDFSCluster cluster = null;
-    SecondaryNameNode snn = null;
-    try {
-      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
-          .build();
-      cluster.waitActive();
-
-      snn = new SecondaryNameNode(conf);
-      String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
-          SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
-      assertTrue(pageContents.contains("Last Checkpoint Time"));
-    } finally {
-      if (cluster != null) {
-        cluster.shutdown();
-      }
-      if (snn != null) {
-        snn.shutdown();
-      }
-    }
+    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
+        SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
+    assertTrue(pageContents.contains("Last Checkpoint Time"));
+  }
+
+  @Test
+  public void testSecondaryWebJmx() throws MalformedURLException, IOException {
+    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
+        SecondaryNameNode.getHttpAddress(conf).getPort() + "/jmx"));
+    assertTrue(pageContents.contains(
+        "Hadoop:service=SecondaryNameNode,name=JvmMetrics"));
   }
 }