MAPREDUCE-4811. JobHistoryServer should show when it was started in WebUI About page. Contributed by Ravi Prakash

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1412377 13f79535-47bb-0310-9956-ffa450edef68
Jason Darrell Lowe 2012-11-22 00:05:26 +00:00
parent 0f1899ee19
commit 6c5f37e46d
6 changed files with 30 additions and 5 deletions


@@ -586,6 +586,9 @@ Release 0.23.6 - UNRELEASED
 
   IMPROVEMENTS
 
+    MAPREDUCE-4811. JobHistoryServer should show when it was started in WebUI
+    About page (Ravi Prakash via jlowe)
+
   OPTIMIZATIONS
 
   BUG FIXES


@@ -51,6 +51,8 @@ public class JobHistoryServer extends CompositeService {
    */
   public static final int SHUTDOWN_HOOK_PRIORITY = 30;
 
+  public static final long historyServerTimeStamp = System.currentTimeMillis();
+
   private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
   private HistoryContext historyContext;
   private HistoryClientService clientService;

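The new constant is initialized once, when the JobHistoryServer class is loaded at daemon startup, so it effectively records the process start time. A minimal sketch of how a caller inside the same JVM could derive uptime from it (the helper class below is hypothetical, not part of this patch):

import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;

public class HistoryServerUptime {
  // Hypothetical helper: milliseconds elapsed since the JobHistoryServer class
  // was loaded. Only meaningful when run inside the history server process.
  public static long uptimeMillis() {
    return System.currentTimeMillis() - JobHistoryServer.historyServerTimeStamp;
  }

  public static void main(String[] args) {
    System.out.println("History server uptime (ms): " + uptimeMillis());
  }
}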

@@ -21,7 +21,9 @@ package org.apache.hadoop.mapreduce.v2.hs.webapp;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
+import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
@@ -47,7 +49,9 @@ public class HsAboutPage extends HsView {
   @Override protected Class<? extends SubView> content() {
     HistoryInfo info = new HistoryInfo();
     info("History Server").
-      _("BuildVersion", info.getHadoopBuildVersion() + " on " + info.getHadoopVersionBuiltOn());
+      _("BuildVersion", info.getHadoopBuildVersion()
+        + " on " + info.getHadoopVersionBuiltOn()).
+      _("History Server started on", Times.format(info.getStartedOn()));
     return InfoBlock.class;
   }
 }

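On the About page, Times.format renders the epoch-millisecond timestamp as a human-readable date. A rough standalone approximation of what the new row displays (the exact pattern and time zone used by YARN's Times are an assumption here):

import java.text.SimpleDateFormat;
import java.util.Date;

public class StartedOnDemo {
  public static void main(String[] args) {
    long startedOn = 1353512830963L; // example value from the REST API sample later in this commit
    // Approximation only; YARN's Times.format may use a different pattern.
    SimpleDateFormat fmt = new SimpleDateFormat("EEE MMM dd HH:mm:ss Z yyyy");
    System.out.println("History Server started on: " + fmt.format(new Date(startedOn)));
  }
}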

@@ -22,17 +22,20 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
 
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.util.VersionInfo;
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
 public class HistoryInfo {
 
+  protected long startedOn;
   protected String hadoopVersion;
   protected String hadoopBuildVersion;
   protected String hadoopVersionBuiltOn;
 
   public HistoryInfo() {
+    this.startedOn = JobHistoryServer.historyServerTimeStamp;
     this.hadoopVersion = VersionInfo.getVersion();
     this.hadoopBuildVersion = VersionInfo.getBuildVersion();
     this.hadoopVersionBuiltOn = VersionInfo.getDate();
@@ -50,4 +53,8 @@ public class HistoryInfo {
     return this.hadoopVersionBuiltOn;
   }
 
+  public long getStartedOn() {
+    return this.startedOn;
+  }
+
 }

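Because HistoryInfo is a JAXB bean with FIELD access, the new startedOn field is picked up automatically when the web layer marshals the object to XML or JSON. A self-contained sketch of the same pattern, using a simplified stand-in bean rather than the actual Hadoop class:

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

public class JaxbDemo {

  @XmlRootElement(name = "historyInfo")
  @XmlAccessorType(XmlAccessType.FIELD)
  public static class InfoBean {
    // FIELD access means fields are serialized directly; no getters required.
    protected long startedOn = System.currentTimeMillis();
    protected String hadoopVersion = "0.23.6-SNAPSHOT"; // placeholder value
  }

  public static void main(String[] args) throws Exception {
    Marshaller m = JAXBContext.newInstance(InfoBean.class).createMarshaller();
    // Produces an XML document containing a <startedOn> element,
    // much like the <historyInfo> sample in the REST documentation below.
    m.marshal(new InfoBean(), System.out);
  }
}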

@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
+import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.Clock;
@@ -344,21 +345,24 @@ public class TestHsWebServices extends JerseyTest {
   }
 
   public void verifyHsInfoGeneric(String hadoopVersionBuiltOn,
-      String hadoopBuildVersion, String hadoopVersion) {
+      String hadoopBuildVersion, String hadoopVersion, long startedon) {
     WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
         VersionInfo.getDate(), hadoopVersionBuiltOn);
     WebServicesTestUtils.checkStringMatch("hadoopBuildVersion",
         VersionInfo.getBuildVersion(), hadoopBuildVersion);
     WebServicesTestUtils.checkStringMatch("hadoopVersion",
         VersionInfo.getVersion(), hadoopVersion);
+    assertEquals("startedOn doesn't match: ",
+        JobHistoryServer.historyServerTimeStamp, startedon);
   }
 
   public void verifyHSInfo(JSONObject info, TestAppContext ctx)
       throws JSONException {
-    assertEquals("incorrect number of elements", 3, info.length());
+    assertEquals("incorrect number of elements", 4, info.length());
 
     verifyHsInfoGeneric(info.getString("hadoopVersionBuiltOn"),
-        info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"));
+        info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"),
+        info.getLong("startedOn"));
   }
 
   public void verifyHSInfoXML(String xml, TestAppContext ctx)
public void verifyHSInfoXML(String xml, TestAppContext ctx) public void verifyHSInfoXML(String xml, TestAppContext ctx)
@@ -376,7 +380,8 @@ public class TestHsWebServices extends JerseyTest {
     verifyHsInfoGeneric(
         WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
         WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
-        WebServicesTestUtils.getXmlString(element, "hadoopVersion"));
+        WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
+        WebServicesTestUtils.getXmlLong(element, "startedOn"));
   }
 }


@@ -56,6 +56,8 @@ History Server REST API's.
 *---------------+--------------+-------------------------------+
 || Item         || Data Type   || Description                   |
 *---------------+--------------+-------------------------------+
+| startedOn | long | The time the history server was started (in ms since epoch)|
+*---------------+--------------+-------------------------------+
 | hadoopVersion | string | Version of hadoop common |
 *---------------+--------------+-------------------------------+
 | hadoopBuildVersion | string | Hadoop common build string with build version, user, and checksum |
@@ -87,6 +89,7 @@ History Server REST API's.
 +---+
 {
    "historyInfo" : {
+      "startedOn":1353512830963,
       "hadoopVersionBuiltOn" : "Wed Jan 11 21:18:36 UTC 2012",
       "hadoopBuildVersion" : "0.23.1-SNAPSHOT from 1230253 by user1 source checksum bb6e554c6d50b0397d826081017437a7",
       "hadoopVersion" : "0.23.1-SNAPSHOT"
@@ -117,6 +120,7 @@ History Server REST API's.
 +---+
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
 <historyInfo>
+  <startedOn>1353512830963</startedOn>
   <hadoopVersion>0.23.1-SNAPSHOT</hadoopVersion>
   <hadoopBuildVersion>0.23.1-SNAPSHOT from 1230253 by user1 source checksum bb6e554c6d50b0397d826081017437a7</hadoopBuildVersion>
   <hadoopVersionBuiltOn>Wed Jan 11 21:18:36 UTC 2012</hadoopVersionBuiltOn>