MAPREDUCE-3299. Added AMInfo table to the MR AM job pages to list all the job-attempts when AM restarts and recovers. (Jonathan Eagles via vinodkv)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1229766 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2012-01-10 22:26:45 +00:00
parent 44e0bb831b
commit bc374626be
12 changed files with 379 additions and 31 deletions
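
For quick reference, the REST endpoint introduced by this change can be exercised as sketched below. The path and the jobAttempt field names are taken from the diff itself (AMWebServices, HsWebServices and the AMAttemptInfo/AMAttemptsInfo DAOs); the host, port, job id and field values are illustrative placeholders, not output captured from a real cluster.

    GET http://<am-host>:<port>/ws/v1/mapreduce/jobs/<job-id>/jobattempts
    Accept: application/json

    {
      "jobAttempts": {
        "jobAttempt": [
          {
            "nodeHttpAddress": "host.example.com:8042",
            "nodeId": "host.example.com:45454",
            "id": 1,
            "startTime": 1326238773493,
            "containerId": "container_1326232085508_0004_01_000001",
            "logsLink": "http://host.example.com:8042/node/containerlogs/container_1326232085508_0004_01_000001"
          }
        ]
      }
    }

The history server exposes the same payload under /ws/v1/history/mapreduce/jobs/<job-id>/jobattempts, and requesting application/xml returns the equivalent jobAttempts/jobAttempt XML, as the tests in this commit verify.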


@@ -171,6 +171,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3382. Enhanced MR AM to use a proxy to ping the job-end
     notification URL. (Ravi Prakash via vinodkv)
 
+    MAPREDUCE-3299. Added AMInfo table to the MR AM job pages to list all the
+    job-attempts when AM restarts and recovers. (Jonathan Eagles via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar


@@ -33,6 +33,7 @@ import javax.ws.rs.core.Response.Status;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -42,6 +43,8 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
@@ -210,6 +213,21 @@ public class AMWebServices {
     return new JobInfo(job, hasAccess(job, hsr));
   }
 
+  @GET
+  @Path("/jobs/{jobid}/jobattempts")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
+
+    Job job = getJobFromJobIdString(jid, appCtx);
+    AMAttemptsInfo amAttempts = new AMAttemptsInfo();
+    for (AMInfo amInfo : job.getAMInfos()) {
+      AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(
+          job.getID()), job.getUserName());
+      amAttempts.add(attempt);
+    }
+    return amAttempts;
+  }
+
   @GET
   @Path("/jobs/{jobid}/counters")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })


@@ -30,6 +30,8 @@ import javax.ws.rs.ext.ContextResolver;
 import javax.ws.rs.ext.Provider;
 import javax.xml.bind.JAXBContext;
 
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
@@ -54,22 +56,22 @@ public class JAXBContextResolver implements ContextResolver<JAXBContext> {
   private JAXBContext context;
   private final Set<Class> types;
 
   // you have to specify all the dao classes here
-  private final Class[] cTypes = {AppInfo.class, CounterInfo.class,
-      JobTaskAttemptCounterInfo.class, JobTaskCounterInfo.class,
-      TaskCounterGroupInfo.class, ConfInfo.class, JobCounterInfo.class,
-      TaskCounterInfo.class, CounterGroupInfo.class, JobInfo.class,
-      JobsInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class,
-      TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class,
-      ConfEntryInfo.class};
+  private final Class[] cTypes = {AMAttemptInfo.class, AMAttemptsInfo.class,
+      AppInfo.class, CounterInfo.class, JobTaskAttemptCounterInfo.class,
+      JobTaskCounterInfo.class, TaskCounterGroupInfo.class, ConfInfo.class,
+      JobCounterInfo.class, TaskCounterInfo.class, CounterGroupInfo.class,
+      JobInfo.class, JobsInfo.class, ReduceTaskAttemptInfo.class,
+      TaskAttemptInfo.class, TaskInfo.class, TasksInfo.class,
+      TaskAttemptsInfo.class, ConfEntryInfo.class};
 
   public JAXBContextResolver() throws Exception {
     this.types = new HashSet<Class>(Arrays.asList(cTypes));
     this.context = new JSONJAXBContext(JSONConfiguration.natural().
         rootUnwrapping(false).build(), cTypes);
   }
 
   @Override
   public JAXBContext getContext(Class<?> objectType) {
     return (types.contains(objectType)) ? context : null;


@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.app.webapp;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
@@ -28,14 +29,22 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
 
 import java.util.Date;
+import java.util.List;
 
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
@@ -62,6 +71,11 @@ public class JobBlock extends HtmlBlock {
         p()._("Sorry, ", jid, " not found.")._();
       return;
     }
+
+    List<AMInfo> amInfos = job.getAMInfos();
+    String amString =
+        amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
+
     JobInfo jinfo = new JobInfo(job, true);
     info("Job Overview").
         _("Job Name:", jinfo.getName()).
@@ -69,10 +83,40 @@ public class JobBlock extends HtmlBlock {
         _("Uberized:", jinfo.isUberized()).
         _("Started:", new Date(jinfo.getStartTime())).
         _("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
-    html.
+
+    DIV<Hamlet> div = html.
       _(InfoBlock.class).
-      div(_INFO_WRAP).
+      div(_INFO_WRAP);
+
+    // MRAppMasters Table
+    TABLE<DIV<Hamlet>> table = div.table("#job");
+    table.
+      tr().
+        th(amString).
+      _().
+      tr().
+        th(_TH, "Attempt Number").
+        th(_TH, "Start Time").
+        th(_TH, "Node").
+        th(_TH, "Logs").
+        _();
+    for (AMInfo amInfo : amInfos) {
+      AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
+          jinfo.getId(), jinfo.getUserName());
+      table.tr().
+        td(String.valueOf(attempt.getAttemptId())).
+        td(new Date(attempt.getStartTime()).toString()).
+        td().a(".nodelink", url("http://", attempt.getNodeHttpAddress()),
+            attempt.getNodeHttpAddress())._().
+        td().a(".logslink", url(attempt.getLogsLink()),
+            "logs")._().
+        _();
+    }
+    table._();
+    div._();
+
+    html.div(_INFO_WRAP).
       // Tasks table
       table("#job").
         tr().


@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+
+@XmlRootElement(name = "jobAttempt")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AMAttemptInfo {
+
+  protected String nodeHttpAddress;
+  protected String nodeId;
+  protected int id;
+  protected long startTime;
+  protected String containerId;
+  protected String logsLink;
+
+  public AMAttemptInfo() {
+  }
+
+  public AMAttemptInfo(AMInfo amInfo, String jobId, String user) {
+    this.nodeHttpAddress = "";
+    this.nodeId = "";
+    String nmHost = amInfo.getNodeManagerHost();
+    int nmHttpPort = amInfo.getNodeManagerHttpPort();
+    int nmPort = amInfo.getNodeManagerPort();
+    if (nmHost != null) {
+      this.nodeHttpAddress = nmHost + ":" + nmHttpPort;
+      NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
+      this.nodeId = nodeId.toString();
+    }
+
+    this.id = amInfo.getAppAttemptId().getAttemptId();
+    this.startTime = amInfo.getStartTime();
+    this.containerId = "";
+    this.logsLink = "";
+    ContainerId containerId = amInfo.getContainerId();
+    if (containerId != null) {
+      this.containerId = containerId.toString();
+      this.logsLink = join("http://" + nodeHttpAddress,
+          ujoin("node", "containerlogs", this.containerId));
+    }
+  }
+
+  public String getNodeHttpAddress() {
+    return this.nodeHttpAddress;
+  }
+
+  public String getNodeId() {
+    return this.nodeId;
+  }
+
+  public int getAttemptId() {
+    return this.id;
+  }
+
+  public long getStartTime() {
+    return this.startTime;
+  }
+
+  public String getContainerId() {
+    return this.containerId;
+  }
+
+  public String getLogsLink() {
+    return this.logsLink;
+  }
+}


@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "jobAttempts")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AMAttemptsInfo {
+
+  @XmlElement(name = "jobAttempt")
+  protected ArrayList<AMAttemptInfo> attempt = new ArrayList<AMAttemptInfo>();
+
+  public AMAttemptsInfo() {
+  } // JAXB needs this
+
+  public void add(AMAttemptInfo info) {
+    this.attempt.add(info);
+  }
+
+  public ArrayList<AMAttemptInfo> getAttempts() {
+    return this.attempt;
+  }
+}
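
Given the annotations above, a single recovered attempt would serialize to XML roughly as follows. This is only an illustrative sketch: the element names come from the @XmlRootElement/@XmlElement declarations and from the history-server tests later in this commit, while the values are made up.

    <jobAttempts>
      <jobAttempt>
        <nodeHttpAddress>host.example.com:8042</nodeHttpAddress>
        <nodeId>host.example.com:45454</nodeId>
        <id>1</id>
        <startTime>1326238773493</startTime>
        <containerId>container_1326232085508_0004_01_000001</containerId>
        <logsLink>http://host.example.com:8042/node/containerlogs/container_1326232085508_0004_01_000001</logsLink>
      </jobAttempt>
    </jobAttempts>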


@@ -214,7 +214,7 @@ public class JobInfo {
     return this.state.toString();
   }
 
-  public String getUser() {
+  public String getUserName() {
     return this.user;
   }


@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
@@ -33,6 +34,7 @@ import javax.xml.parsers.DocumentBuilderFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -44,6 +46,7 @@ import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -76,6 +79,7 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
  * /ws/v1/mapreduce/jobs
  * /ws/v1/mapreduce/jobs/{jobid}
  * /ws/v1/mapreduce/jobs/{jobid}/counters
+ * /ws/v1/mapreduce/jobs/{jobid}/jobattempts
  */
 public class TestAMWebServicesJobs extends JerseyTest {
@@ -777,4 +781,136 @@ public class TestAMWebServicesJobs extends JerseyTest {
     }
   }
 
+  @Test
+  public void testJobAttempts() throws JSONException, Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+      ClientResponse response = r.path("ws").path("v1")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      JSONObject json = response.getEntity(JSONObject.class);
+      assertEquals("incorrect number of elements", 1, json.length());
+      JSONObject info = json.getJSONObject("jobAttempts");
+      verifyJobAttempts(info, jobsMap.get(id));
+    }
+  }
+
+  @Test
+  public void testJobAttemptsSlash() throws JSONException, Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+      ClientResponse response = r.path("ws").path("v1")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts/")
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      JSONObject json = response.getEntity(JSONObject.class);
+      assertEquals("incorrect number of elements", 1, json.length());
+      JSONObject info = json.getJSONObject("jobAttempts");
+      verifyJobAttempts(info, jobsMap.get(id));
+    }
+  }
+
+  @Test
+  public void testJobAttemptsDefault() throws JSONException, Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+      ClientResponse response = r.path("ws").path("v1")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
+          .get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+      JSONObject json = response.getEntity(JSONObject.class);
+      assertEquals("incorrect number of elements", 1, json.length());
+      JSONObject info = json.getJSONObject("jobAttempts");
+      verifyJobAttempts(info, jobsMap.get(id));
+    }
+  }
+
+  @Test
+  public void testJobAttemptsXML() throws Exception {
+    WebResource r = resource();
+    Map<JobId, Job> jobsMap = appContext.getAllJobs();
+    for (JobId id : jobsMap.keySet()) {
+      String jobId = MRApps.toString(id);
+      ClientResponse response = r.path("ws").path("v1")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
+          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+      String xml = response.getEntity(String.class);
+      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+      DocumentBuilder db = dbf.newDocumentBuilder();
+      InputSource is = new InputSource();
+      is.setCharacterStream(new StringReader(xml));
+      Document dom = db.parse(is);
+      NodeList attempts = dom.getElementsByTagName("jobAttempts");
+      assertEquals("incorrect number of elements", 1, attempts.getLength());
+      NodeList info = dom.getElementsByTagName("jobAttempt");
+      verifyJobAttemptsXML(info, jobsMap.get(id));
+    }
+  }
+
+  public void verifyJobAttempts(JSONObject info, Job job)
+      throws JSONException {
+    JSONArray attempts = info.getJSONArray("jobAttempt");
+    assertEquals("incorrect number of elements", 2, attempts.length());
+    for (int i = 0; i < attempts.length(); i++) {
+      JSONObject attempt = attempts.getJSONObject(i);
+      verifyJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"),
+          attempt.getString("nodeId"), attempt.getInt("id"),
+          attempt.getLong("startTime"), attempt.getString("containerId"),
+          attempt.getString("logsLink"));
+    }
+  }
+
+  public void verifyJobAttemptsXML(NodeList nodes, Job job) {
+    assertEquals("incorrect number of elements", 2, nodes.getLength());
+    for (int i = 0; i < nodes.getLength(); i++) {
+      Element element = (Element) nodes.item(i);
+      verifyJobAttemptsGeneric(job,
+          WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
+          WebServicesTestUtils.getXmlString(element, "nodeId"),
+          WebServicesTestUtils.getXmlInt(element, "id"),
+          WebServicesTestUtils.getXmlLong(element, "startTime"),
+          WebServicesTestUtils.getXmlString(element, "containerId"),
+          WebServicesTestUtils.getXmlString(element, "logsLink"));
+    }
+  }
+
+  public void verifyJobAttemptsGeneric(Job job, String nodeHttpAddress,
+      String nodeId, int id, long startTime, String containerId, String logsLink) {
+    boolean attemptFound = false;
+    for (AMInfo amInfo : job.getAMInfos()) {
+      if (amInfo.getAppAttemptId().getAttemptId() == id) {
+        attemptFound = true;
+        String nmHost = amInfo.getNodeManagerHost();
+        int nmHttpPort = amInfo.getNodeManagerHttpPort();
+        int nmPort = amInfo.getNodeManagerPort();
+        WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
+            + nmHttpPort, nodeHttpAddress);
+        WebServicesTestUtils.checkStringMatch("nodeId",
+            BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId);
+        assertTrue("startime not greater than 0", startTime > 0);
+        WebServicesTestUtils.checkStringMatch("containerId", amInfo
+            .getContainerId().toString(), containerId);
+        String localLogsLink = ujoin("node", "containerlogs", containerId);
+        assertTrue("logsLink", logsLink.contains(localLogsLink));
+      }
+    }
+    assertTrue("attempt: " + id + " was not found", attemptFound);
+  }
+
 }


@@ -229,7 +229,7 @@ public class HsWebServices {
   }
 
   @GET
-  @Path("/mapreduce/jobs/{jobid}/attempts")
+  @Path("/mapreduce/jobs/{jobid}/jobattempts")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {


@@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
-@XmlRootElement(name = "amAttempt")
+@XmlRootElement(name = "jobAttempt")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class AMAttemptInfo {
@@ -52,12 +52,14 @@ public class AMAttemptInfo {
     this.nodeHttpAddress = "";
     this.nodeId = "";
     String nmHost = amInfo.getNodeManagerHost();
-    int nmPort = amInfo.getNodeManagerHttpPort();
+    int nmHttpPort = amInfo.getNodeManagerHttpPort();
+    int nmPort = amInfo.getNodeManagerPort();
     if (nmHost != null) {
-      this.nodeHttpAddress = nmHost + ":" + nmPort;
+      this.nodeHttpAddress = nmHost + ":" + nmHttpPort;
       NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
       this.nodeId = nodeId.toString();
     }
 
     this.id = amInfo.getAppAttemptId().getAttemptId();
     this.startTime = amInfo.getStartTime();
     this.containerId = "";


@@ -21,12 +21,14 @@ import java.util.ArrayList;
 
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
-@XmlRootElement(name = "attempts")
+@XmlRootElement(name = "jobAttempts")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class AMAttemptsInfo {
 
+  @XmlElement(name = "jobAttempt")
   protected ArrayList<AMAttemptInfo> attempt = new ArrayList<AMAttemptInfo>();
 
   public AMAttemptsInfo() {


@@ -77,7 +77,7 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
  *
  * /ws/v1/history/mapreduce/jobs /ws/v1/history/mapreduce/jobs/{jobid}
  * /ws/v1/history/mapreduce/jobs/{jobid}/counters
- * /ws/v1/history/mapreduce/jobs/{jobid}/attempts
+ * /ws/v1/history/mapreduce/jobs/{jobid}/jobattempts
  */
 public class TestHsWebServicesJobs extends JerseyTest {
@@ -626,12 +626,12 @@ public class TestHsWebServicesJobs extends JerseyTest {
       String jobId = MRApps.toString(id);
       ClientResponse response = r.path("ws").path("v1").path("history")
-          .path("mapreduce").path("jobs").path(jobId).path("attempts")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
           .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject json = response.getEntity(JSONObject.class);
       assertEquals("incorrect number of elements", 1, json.length());
-      JSONObject info = json.getJSONObject("attempts");
+      JSONObject info = json.getJSONObject("jobAttempts");
       verifyHsJobAttempts(info, jobsMap.get(id));
     }
   }
@@ -644,12 +644,12 @@ public class TestHsWebServicesJobs extends JerseyTest {
       String jobId = MRApps.toString(id);
       ClientResponse response = r.path("ws").path("v1").path("history")
-          .path("mapreduce").path("jobs").path(jobId).path("attempts/")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts/")
           .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject json = response.getEntity(JSONObject.class);
       assertEquals("incorrect number of elements", 1, json.length());
-      JSONObject info = json.getJSONObject("attempts");
+      JSONObject info = json.getJSONObject("jobAttempts");
       verifyHsJobAttempts(info, jobsMap.get(id));
     }
   }
@@ -662,12 +662,12 @@ public class TestHsWebServicesJobs extends JerseyTest {
       String jobId = MRApps.toString(id);
       ClientResponse response = r.path("ws").path("v1").path("history")
-          .path("mapreduce").path("jobs").path(jobId).path("attempts")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
           .get(ClientResponse.class);
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject json = response.getEntity(JSONObject.class);
       assertEquals("incorrect number of elements", 1, json.length());
-      JSONObject info = json.getJSONObject("attempts");
+      JSONObject info = json.getJSONObject("jobAttempts");
       verifyHsJobAttempts(info, jobsMap.get(id));
     }
   }
@@ -680,7 +680,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
       String jobId = MRApps.toString(id);
       ClientResponse response = r.path("ws").path("v1").path("history")
-          .path("mapreduce").path("jobs").path(jobId).path("attempts")
+          .path("mapreduce").path("jobs").path(jobId).path("jobattempts")
           .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
       assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
       String xml = response.getEntity(String.class);
@@ -689,9 +689,9 @@ public class TestHsWebServicesJobs extends JerseyTest {
       InputSource is = new InputSource();
       is.setCharacterStream(new StringReader(xml));
       Document dom = db.parse(is);
-      NodeList attempts = dom.getElementsByTagName("attempts");
+      NodeList attempts = dom.getElementsByTagName("jobAttempts");
       assertEquals("incorrect number of elements", 1, attempts.getLength());
-      NodeList info = dom.getElementsByTagName("attempt");
+      NodeList info = dom.getElementsByTagName("jobAttempt");
       verifyHsJobAttemptsXML(info, jobsMap.get(id));
     }
   }
@@ -699,7 +699,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
   public void verifyHsJobAttempts(JSONObject info, Job job)
       throws JSONException {
-    JSONArray attempts = info.getJSONArray("attempt");
+    JSONArray attempts = info.getJSONArray("jobAttempt");
     assertEquals("incorrect number of elements", 2, attempts.length());
     for (int i = 0; i < attempts.length(); i++) {
       JSONObject attempt = attempts.getJSONObject(i);
@@ -732,9 +732,10 @@ public class TestHsWebServicesJobs extends JerseyTest {
       if (amInfo.getAppAttemptId().getAttemptId() == id) {
         attemptFound = true;
         String nmHost = amInfo.getNodeManagerHost();
-        int nmPort = amInfo.getNodeManagerHttpPort();
+        int nmHttpPort = amInfo.getNodeManagerHttpPort();
+        int nmPort = amInfo.getNodeManagerPort();
         WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
-            + nmPort, nodeHttpAddress);
+            + nmHttpPort, nodeHttpAddress);
         WebServicesTestUtils.checkStringMatch("nodeId",
             BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId);
         assertTrue("startime not greater than 0", startTime > 0);