HDFS-6270. Merge r1590197 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1590198 13f79535-47bb-0310-9956-ffa450edef68
Haohui Mai 2014-04-25 23:50:48 +00:00
parent 2a76cade32
commit 86f1b41d28
4 changed files with 30 additions and 10 deletions


@@ -154,6 +154,9 @@ Release 2.5.0 - UNRELEASED
HDFS-5865. Update OfflineImageViewer document. (Akira Ajisaka via wheat9)
HDFS-6270. Secondary namenode status page shows transaction count in bytes.
(Benoy Antony via wheat9)
Release 2.4.1 - UNRELEASED
INCOMPATIBLE CHANGES
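
The entry above describes the bug this patch fixes: the secondary namenode status page took the configured checkpoint transaction threshold and formatted it as a byte size. A minimal, hypothetical illustration of the mislabeling follows (not part of the patch; the class name and sample value are invented for the example, and StringUtils is hadoop-common's org.apache.hadoop.util.StringUtils):

    import org.apache.hadoop.util.StringUtils;

    public class TxnCountLabelDemo {
      public static void main(String[] args) {
        // dfs.namenode.checkpoint.txns is a count of edit-log transactions;
        // 1,000,000 is its usual default.
        long txnCount = 1000000L;
        // Old rendering: the count was pushed through byteDesc(), so the page
        // reported the transaction threshold as roughly "976.56 KB".
        System.out.println("Checkpoint Size      : " + StringUtils.byteDesc(txnCount)
            + " (= " + txnCount + " bytes)");
        // New rendering: the raw count under an accurate label.
        System.out.println("Checkpoint Transactions: " + txnCount);
      }
    }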


@@ -78,7 +78,6 @@ import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.hadoop.util.VersionInfo;
import javax.management.ObjectName;
@@ -130,16 +129,15 @@ public class SecondaryNameNode implements Runnable,
@Override
public String toString() {
  return getClass().getSimpleName() + " Status"
-     + "\nName Node Address    : " + nameNodeAddr
-     + "\nStart Time           : " + new Date(starttime)
-     + "\nLast Checkpoint      : " + (lastCheckpointTime == 0? "--":
+     + "\nName Node Address      : " + nameNodeAddr
+     + "\nStart Time             : " + new Date(starttime)
+     + "\nLast Checkpoint        : " + (lastCheckpointTime == 0? "--":
          ((Time.monotonicNow() - lastCheckpointTime) / 1000))
      + " seconds ago"
-     + "\nCheckpoint Period    : " + checkpointConf.getPeriod() + " seconds"
-     + "\nCheckpoint Size      : " + StringUtils.byteDesc(checkpointConf.getTxnCount())
-     + " (= " + checkpointConf.getTxnCount() + " bytes)"
-     + "\nCheckpoint Dirs      : " + checkpointDirs
-     + "\nCheckpoint Edits Dirs: " + checkpointEditsDirs;
+     + "\nCheckpoint Period      : " + checkpointConf.getPeriod() + " seconds"
+     + "\nCheckpoint Transactions: " + checkpointConf.getTxnCount()
+     + "\nCheckpoint Dirs        : " + checkpointDirs
+     + "\nCheckpoint Edits Dirs  : " + checkpointEditsDirs;
}
@VisibleForTesting
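
The value now printed by toString() above, checkpointConf.getTxnCount(), is the checkpoint transaction threshold the secondary namenode reads from its configuration, not a quantity of bytes, which is why the byteDesc() formatting was dropped. The sketch below (illustrative only, not the actual CheckpointConf source) shows how such a threshold would typically be pulled from a Configuration, using the dfs.namenode.checkpoint.txns key that the test file imports further down:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class CheckpointTxnThresholdSketch {
      // Number of namespace transactions that triggers a checkpoint.
      // DFS_NAMENODE_CHECKPOINT_TXNS_KEY is "dfs.namenode.checkpoint.txns";
      // the default constant ships alongside it in DFSConfigKeys.
      static long txnCount(Configuration conf) {
        return conf.getLong(DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY,
            DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_DEFAULT);
      }
    }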


@@ -66,7 +66,7 @@
<tr><th>Started</th><td>{StartTime|date_tostring}</td></tr>
<tr><th>Last Checkpoint</th><td>{@if cond="{LastCheckpointTime} === 0"}Never{:else}{LastCheckpointTime|date_tostring}{/if}</td></tr>
<tr><th>Checkpoint Period</th><td>{CheckpointPeriod} seconds</td></tr>
-<tr><th>Checkpoint Size</th><td>{TxnCount|fmt_bytes}</td></tr>
+<tr><th>Checkpoint Transactions</th><td>{TxnCount}</td></tr>
</table>
<div class="page-header"><h2><small>Checkpoint Image URI</small></h2></div>
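
In the template hunk above, dropping the fmt_bytes filter makes {TxnCount} render as a plain number under the corrected "Checkpoint Transactions" header. The value is published by the secondary namenode over JMX; assuming the bean name "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo" and a "TxnCount" attribute (both inferred from the template binding, not confirmed by this diff), it could be read in-process like this:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    public class ReadTxnCountViaJmx {
      public static void main(String[] args) throws Exception {
        // Only meaningful inside (or attached to) a running SecondaryNameNode;
        // otherwise the lookup fails with InstanceNotFoundException.
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        ObjectName name = new ObjectName(
            "Hadoop:service=SecondaryNameNode,name=SecondaryNameNodeInfo");
        Object txnCount = mbs.getAttribute(name, "TxnCount");
        System.out.println("Checkpoint Transactions: " + txnCount);
      }
    }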


@@ -17,8 +17,11 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_TXNS_KEY;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -28,6 +31,7 @@ import org.junit.Test;
import javax.management.*;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.URL;
public class TestSecondaryWebUi {
@@ -39,6 +43,7 @@ public class TestSecondaryWebUi {
public static void setUpCluster() throws IOException {
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
"0.0.0.0:0");
conf.setLong(DFS_NAMENODE_CHECKPOINT_TXNS_KEY, 500);
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
.build();
cluster.waitActive();
@@ -73,4 +78,18 @@ public class TestSecondaryWebUi {
Assert.assertArrayEquals(checkpointEditlogDir,
snn.getCheckpointEditlogDirectories());
}
@Test
public void testSecondaryWebUiJsp()
throws IOException, MalformedObjectNameException,
AttributeNotFoundException, MBeanException,
ReflectionException, InstanceNotFoundException {
String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
Assert.assertTrue("Didn't find \"Last Checkpoint\"",
pageContents.contains("Last Checkpoint"));
Assert.assertTrue("Didn't find Checkpoint Transactions: 500",
pageContents.contains("Checkpoint Transactions: 500"));
}
}