From 4ef2598d5d31b5a8e372439ca4b169e0b6918451 Mon Sep 17 00:00:00 2001
From: Haohui Mai <wheat9@apache.org>
Date: Thu, 24 Apr 2014 07:17:17 +0000
Subject: [PATCH] HDFS-6278. Merge r1589613 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1589616 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  2 +
 .../server/namenode/SecondaryNameNode.java      | 71 +++++++++++---
 .../namenode/SecondaryNameNodeInfoMXBean.java   | 52 ++++++++++
 .../server/namenode/VersionInfoMXBean.java      | 35 +++++++
 .../src/main/webapps/hdfs/dfshealth.html        |  2 +-
 .../src/main/webapps/hdfs/explorer.html         |  2 +-
 .../src/main/webapps/secondary/index.html       | 26 +++--
 .../src/main/webapps/secondary/snn.js           | 68 +++++++++++++
 .../src/main/webapps/secondary/status.html      | 96 +++++++++++++++++++
 .../main/webapps/{hdfs => static}/dfs-dust.js   |  0
 .../hdfs/server/namenode/TestCheckpoint.java    |  4 +-
 .../server/namenode/TestSecondaryWebUi.java     | 40 ++++----
 12 files changed, 353 insertions(+), 45 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNodeInfoMXBean.java
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/VersionInfoMXBean.java
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/snn.js
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/status.html
 rename hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/{hdfs => static}/dfs-dust.js (100%)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 28e6f656dd8..7726b27dc07 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -57,6 +57,8 @@ Release 2.5.0 - UNRELEASED
 
     HDFS-6265. Prepare HDFS codebase for JUnit 4.11. (cnauroth)
 
+    HDFS-6278. Create HTML5-based UI for SNN. (wheat9)
+
   OPTIMIZATIONS
 
     HDFS-6214. Webhdfs has poor throughput for files >2GB (daryn)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index ecffd508182..c0d7af96286 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -27,11 +27,9 @@ import java.net.URI;
 import java.net.URL;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
+import java.util.*;
 
+import com.google.common.collect.Lists;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.HelpFormatter;
@@ -70,6 +68,7 @@ import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
+import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -80,6 +79,9 @@ import org.apache.hadoop.util.Time;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.util.VersionInfo;
+
+import javax.management.ObjectName;
 
 /**********************************************************
  * The Secondary NameNode is a helper to the primary NameNode.
@@ -95,7 +97,8 @@ import com.google.common.collect.ImmutableList;
  *
  **********************************************************/
 @InterfaceAudience.Private
-public class SecondaryNameNode implements Runnable {
+public class SecondaryNameNode implements Runnable,
+    SecondaryNameNodeInfoMXBean {
 
   static{
     HdfsConfiguration.init();
@@ -122,7 +125,7 @@ public class SecondaryNameNode implements Runnable {
 
   private FSNamesystem namesystem;
   private Thread checkpointThread;
-
+  private ObjectName nameNodeStatusBeanName;
 
   @Override
   public String toString() {
@@ -169,11 +172,6 @@ public class SecondaryNameNode implements Runnable {
     this.namenode = namenode;
   }
 
-  @VisibleForTesting
-  List<URI> getCheckpointDirs() {
-    return ImmutableList.copyOf(checkpointDirs);
-  }
-
   /**
    * Create a connection to the primary namenode.
   */
@@ -265,6 +263,9 @@
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
         DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
 
+    nameNodeStatusBeanName = MBeans.register("SecondaryNameNode",
+        "SecondaryNameNodeInfo", this);
+
     infoServer = builder.build();
 
     infoServer.setAttribute("secondary.name.node", this);
@@ -330,6 +331,10 @@
     } catch (Exception e) {
       LOG.warn("Exception shutting down SecondaryNameNode", e);
     }
+    if (nameNodeStatusBeanName != null) {
+      MBeans.unregister(nameNodeStatusBeanName);
+      nameNodeStatusBeanName = null;
+    }
     try {
       if (checkpointImage != null) {
         checkpointImage.close();
@@ -679,6 +684,50 @@
     checkpointThread.start();
   }
 
+  @Override // SecondaryNameNodeInfoMXBean
+  public String getHostAndPort() {
+    return NetUtils.getHostPortString(nameNodeAddr);
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public long getStartTime() {
+    return starttime;
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public long getLastCheckpointTime() {
+    return lastCheckpointTime;
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public String[] getCheckpointDirectories() {
+    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointDirs.size());
+    for (URI d : checkpointDirs) {
+      r.add(d.toString());
+    }
+    return r.toArray(new String[r.size()]);
+  }
+
+  @Override // SecondaryNameNodeInfoMXBean
+  public String[] getCheckpointEditlogDirectories() {
+    ArrayList<String> r = Lists.newArrayListWithCapacity(checkpointEditsDirs.size());
+    for (URI d : checkpointEditsDirs) {
+      r.add(d.toString());
+    }
+    return r.toArray(new String[r.size()]);
+  }
+
+  @Override // VersionInfoMXBean
+  public String getCompileInfo() {
+    return VersionInfo.getDate() + " by " + VersionInfo.getUser() +
+        " from " + VersionInfo.getBranch();
+  }
+
+  @Override // VersionInfoMXBean
+  public String getSoftwareVersion() {
+    return VersionInfo.getVersion();
+  }
+
   /**
    * Container for parsed command-line options.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNodeInfoMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNodeInfoMXBean.java
new file mode 100644
index 00000000000..01f6fac99c7
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNodeInfoMXBean.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * JMX information of the secondary NameNode
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface SecondaryNameNodeInfoMXBean extends VersionInfoMXBean {
+  /**
+   * @return the NameNode's host and port, colon separated
+   */
+  public String getHostAndPort();
+
+  /**
+   * @return the timestamp at which the SNN started
+   */
+  public long getStartTime();
+
+  /**
+   * @return the timestamp of the last checkpoint
+   */
+  public long getLastCheckpointTime();
+
+  /**
+   * @return the directories that store the checkpoint images
+   */
+  public String[] getCheckpointDirectories();
+  /**
+   * @return the directories that store the edit logs
+   */
+  public String[] getCheckpointEditlogDirectories();
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/VersionInfoMXBean.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/VersionInfoMXBean.java
new file mode 100644
index 00000000000..bd4582957b3
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/VersionInfoMXBean.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public interface VersionInfoMXBean {
+  /**
+   * @return the compilation information which contains date, user and branch
+   */
+  public String getCompileInfo();
+
+  /**
+   * @return the software version
+   */
+  public String getSoftwareVersion();
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
index e0d509354f9..8ce540e1c8b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
@@ -351,7 +351,7 @@
[script markup not recoverable from this copy: the <script> reference that
loaded dfs-dust.js from the hdfs webapp directory is updated to load the
relocated /static/dfs-dust.js]
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index de344833d00..c5094684fdc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -122,7 +122,7 @@
[same change as in dfshealth.html: the dfs-dust.js <script> reference now
points at /static/dfs-dust.js]
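The MBeans.register() call above publishes the bean as
"Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo", and each getter
on the two MXBean interfaces surfaces as a read-only JMX attribute named after
the getter without its "get" prefix. A minimal sketch of reading those
attributes back through the platform MBeanServer; it assumes a
SecondaryNameNode is already running in the same JVM, and the class name
SnnInfoReader is an illustrative choice, not part of this patch:

import java.lang.management.ManagementFactory;

import javax.management.MBeanServer;
import javax.management.ObjectName;

public class SnnInfoReader {
  public static void main(String[] args) throws Exception {
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    // The name MBeans.register() builds from the pair
    // ("SecondaryNameNode", "SecondaryNameNodeInfo").
    ObjectName name = new ObjectName(
        "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");

    // MXBean getters appear as attributes: getHostAndPort() -> "HostAndPort".
    System.out.println(mbs.getAttribute(name, "HostAndPort"));
    System.out.println(mbs.getAttribute(name, "SoftwareVersion"));
    System.out.println(mbs.getAttribute(name, "LastCheckpointTime"));
    for (String dir : (String[]) mbs.getAttribute(name,
        "CheckpointDirectories")) {
      System.out.println(dir);
    }
  }
}

These are the same attributes that snn.js renders and that the rewritten
TestSecondaryWebUi asserts against below.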
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/index.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/index.html
index 988f03de42d..97e0207e06f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/index.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/index.html
[markup not recoverable from this copy: the old "Hadoop Administration" landing
page, whose body was a single "Status" link, is replaced by a stub that keeps
the "Hadoop Administration" title and forwards to the new status.html]
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/snn.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/snn.js
new file mode 100644
index 00000000000..ff2e7850f81
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/snn.js
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+(function () {
+  "use strict";
+
+  var data = {};
+  var outstanding_requests = 2;
+
+  dust.loadSource(dust.compile($('#tmpl-snn').html(), 'snn'));
+
+  function show_error_msg(msg) {
+    $('#alert-panel-body').html(msg);
+    $('#alert-panel').show();
+  }
+
+  function finished_request() {
+    outstanding_requests--;
+    if (outstanding_requests == 0) {
+      if (data.snn !== undefined && data.conf !== undefined) {
+        var conf = data.conf;
+        data.snn.CheckpointPeriod = conf['dfs.namenode.checkpoint.period'];
+        data.snn.TxnCount = conf['dfs.namenode.checkpoint.txns'];
+        render();
+      } else {
+        show_error_msg('Failed to load the information.');
+      }
+    }
+  }
+
+  function load() {
+    $.getJSON('/jmx?qry=Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo', function(resp) {
+      data.snn = resp.beans[0];
+    }).always(finished_request);
+
+    $.ajax({'url': '/conf', 'dataType': 'xml'}).done(function(d) {
+      var $xml = $(d);
+      var confs = {};
+      $xml.find('property').each(function(idx,v) {
+        confs[$(v).find('name').text()] = $(v).find('value').text();
+      });
+      data.conf = confs;
+    }).always(finished_request);
+  }
+
+  function render() {
+    dust.render('snn', data, function(err, out) {
+      $('#tab-overview').html(out);
+      $('#tab-overview').addClass('active');
+    });
+  }
+
+  load();
+})();
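snn.js issues two asynchronous requests and renders only once both complete: a
JSON query against /jmx, narrowed by the qry parameter to the
SecondaryNameNodeInfo bean, and an XML dump of the effective configuration
from /conf, from which it copies dfs.namenode.checkpoint.period and
dfs.namenode.checkpoint.txns into the template data. The JMX half is easy to
reproduce outside a browser; a minimal sketch, assuming the default secondary
HTTP address of localhost:50090 (the class name SnnJmxFetch is illustrative,
not part of this patch):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class SnnJmxFetch {
  public static void main(String[] args) throws Exception {
    String qry = URLEncoder.encode(
        "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo",
        "UTF-8");
    URL url = new URL("http://localhost:50090/jmx?qry=" + qry);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    // The servlet answers with a JSON object whose "beans" array holds the
    // matching MBeans; snn.js reads resp.beans[0] from the same document.
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}

Counting both requests down in outstanding_requests and rendering only from
finished_request() keeps the page from drawing with half its data while still
letting the browser issue the two fetches in parallel.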
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/status.html b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/status.html
new file mode 100644
index 00000000000..86257e96e5c
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/secondary/status.html
@@ -0,0 +1,96 @@
[markup not recoverable from this copy: a page titled "SecondaryNamenode
information" with a "Hadoop, 2014." footer; it declares the #alert-panel and
#tab-overview containers and the #tmpl-snn dust template that snn.js renders
into, and loads the shared static scripts together with snn.js]
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dfs-dust.js
similarity index 100%
rename from hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfs-dust.js
rename to hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/static/dfs-dust.js
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
index 73abb5ceb10..f7ebfe580b6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
@@ -2444,8 +2444,8 @@ public class TestCheckpoint {
   private static List<File> getCheckpointCurrentDirs(SecondaryNameNode secondary) {
     List<File> ret = Lists.newArrayList();
-    for (URI u : secondary.getCheckpointDirs()) {
-      File checkpointDir = new File(u.getPath());
+    for (String u : secondary.getCheckpointDirectories()) {
+      File checkpointDir = new File(URI.create(u).getPath());
       ret.add(new File(checkpointDir, "current"));
     }
     return ret;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
index aba96e1c218..5afcc69e533 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java
@@ -17,20 +17,18 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import javax.management.*;
+import java.io.IOException;
+import java.lang.management.ManagementFactory;
+
 public class TestSecondaryWebUi {
 
   private static MiniDFSCluster cluster;
@@ -59,18 +57,20 @@ public class TestSecondaryWebUi {
   }
 
   @Test
-  public void testSecondaryWebUi() throws IOException {
-    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
-        SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
-    assertTrue("Didn't find \"Last Checkpoint\"",
-        pageContents.contains("Last Checkpoint"));
-  }
-
-  @Test
-  public void testSecondaryWebJmx() throws MalformedURLException, IOException {
-    String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
-        SecondaryNameNode.getHttpAddress(conf).getPort() + "/jmx"));
-    assertTrue(pageContents.contains(
-        "Hadoop:service=SecondaryNameNode,name=JvmMetrics"));
+  public void testSecondaryWebUi()
+          throws IOException, MalformedObjectNameException,
+          AttributeNotFoundException, MBeanException,
+          ReflectionException, InstanceNotFoundException {
+    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+    ObjectName mxbeanName = new ObjectName(
+            "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");
+
+    String[] checkpointDir = (String[]) mbs.getAttribute(mxbeanName,
+            "CheckpointDirectories");
+    Assert.assertArrayEquals(checkpointDir, snn.getCheckpointDirectories());
+    String[] checkpointEditlogDir = (String[]) mbs.getAttribute(mxbeanName,
+            "CheckpointEditlogDirectories");
+    Assert.assertArrayEquals(checkpointEditlogDir,
+            snn.getCheckpointEditlogDirectories());
   }
 }