diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index 6a76177bdc9..2517de47695 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -105,6 +105,9 @@ Release 0.23.1 - Unreleased MAPREDUCE-3382. Enhanced MR AM to use a proxy to ping the job-end notification URL. (Ravi Prakash via vinodkv) + MAPREDUCE-3299. Added AMInfo table to the MR AM job pages to list all the + job-attempts when AM restarts and recovers. (Jonathan Eagles via vinodkv) + OPTIMIZATIONS MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java index 2481b64bd2c..01c5c95e177 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java @@ -33,6 +33,7 @@ import javax.ws.rs.core.Response.Status; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobACL; +import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; @@ -42,6 +43,8 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo; +import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo; +import 
org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptsInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo; @@ -210,6 +213,21 @@ public class AMWebServices { return new JobInfo(job, hasAccess(job, hsr)); } + @GET + @Path("/jobs/{jobid}/jobattempts") + @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) + public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) { + + Job job = getJobFromJobIdString(jid, appCtx); + AMAttemptsInfo amAttempts = new AMAttemptsInfo(); + for (AMInfo amInfo : job.getAMInfos()) { + AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString( + job.getID()), job.getUserName()); + amAttempts.add(attempt); + } + return amAttempts; + } + @GET @Path("/jobs/{jobid}/counters") @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java index ec1c151d8bb..2c44baae2c5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java @@ -30,6 +30,8 @@ import javax.ws.rs.ext.ContextResolver; import javax.ws.rs.ext.Provider; import javax.xml.bind.JAXBContext; +import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo; +import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptsInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo; import 
org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo; @@ -54,22 +56,22 @@ public class JAXBContextResolver implements ContextResolver { private JAXBContext context; private final Set types; - + // you have to specify all the dao classes here - private final Class[] cTypes = {AppInfo.class, CounterInfo.class, - JobTaskAttemptCounterInfo.class, JobTaskCounterInfo.class, - TaskCounterGroupInfo.class, ConfInfo.class, JobCounterInfo.class, - TaskCounterInfo.class, CounterGroupInfo.class, JobInfo.class, - JobsInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class, - TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class, - ConfEntryInfo.class}; - + private final Class[] cTypes = {AMAttemptInfo.class, AMAttemptsInfo.class, + AppInfo.class, CounterInfo.class, JobTaskAttemptCounterInfo.class, + JobTaskCounterInfo.class, TaskCounterGroupInfo.class, ConfInfo.class, + JobCounterInfo.class, TaskCounterInfo.class, CounterGroupInfo.class, + JobInfo.class, JobsInfo.class, ReduceTaskAttemptInfo.class, + TaskAttemptInfo.class, TaskInfo.class, TasksInfo.class, + TaskAttemptsInfo.class, ConfEntryInfo.class}; + public JAXBContextResolver() throws Exception { this.types = new HashSet(Arrays.asList(cTypes)); this.context = new JSONJAXBContext(JSONConfiguration.natural(). rootUnwrapping(false).build(), cTypes); } - + @Override public JAXBContext getContext(Class objectType) { return (types.contains(objectType)) ? 
context : null; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java index fec0a50c5c4..c0d7de0f64f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java @@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.app.webapp; import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID; import static org.apache.hadoop.yarn.util.StringHelper.join; +import static org.apache.hadoop.yarn.util.StringHelper.ujoin; import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN; import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP; import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD; @@ -28,14 +29,22 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE; import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH; import java.util.Date; +import java.util.List; +import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.job.Job; +import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AMAttemptInfo; import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI; import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.util.BuilderUtils; +import 
org.apache.hadoop.yarn.webapp.hamlet.Hamlet; +import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV; +import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE; import org.apache.hadoop.yarn.webapp.view.HtmlBlock; import org.apache.hadoop.yarn.webapp.view.InfoBlock; @@ -62,6 +71,11 @@ public class JobBlock extends HtmlBlock { p()._("Sorry, ", jid, " not found.")._(); return; } + + List amInfos = job.getAMInfos(); + String amString = + amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters"; + JobInfo jinfo = new JobInfo(job, true); info("Job Overview"). _("Job Name:", jinfo.getName()). @@ -69,10 +83,40 @@ public class JobBlock extends HtmlBlock { _("Uberized:", jinfo.isUberized()). _("Started:", new Date(jinfo.getStartTime())). _("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime())); - html. + DIV div = html. _(InfoBlock.class). - div(_INFO_WRAP). + div(_INFO_WRAP); + // MRAppMasters Table + TABLE> table = div.table("#job"); + table. + tr(). + th(amString). + _(). + tr(). + th(_TH, "Attempt Number"). + th(_TH, "Start Time"). + th(_TH, "Node"). + th(_TH, "Logs"). + _(); + for (AMInfo amInfo : amInfos) { + AMAttemptInfo attempt = new AMAttemptInfo(amInfo, + jinfo.getId(), jinfo.getUserName()); + + table.tr(). + td(String.valueOf(attempt.getAttemptId())). + td(new Date(attempt.getStartTime()).toString()). + td().a(".nodelink", url("http://", attempt.getNodeHttpAddress()), + attempt.getNodeHttpAddress())._(). + td().a(".logslink", url(attempt.getLogsLink()), + "logs")._(). + _(); + } + + table._(); + div._(); + + html.div(_INFO_WRAP). // Tasks table table("#job"). tr(). 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java new file mode 100644 index 00000000000..3e2cd12b0dd --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptInfo.java @@ -0,0 +1,95 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.mapreduce.v2.app.webapp.dao; + +import static org.apache.hadoop.yarn.util.StringHelper.join; +import static org.apache.hadoop.yarn.util.StringHelper.ujoin; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlRootElement; + +import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; +import org.apache.hadoop.yarn.api.records.ContainerId; +import org.apache.hadoop.yarn.api.records.NodeId; +import org.apache.hadoop.yarn.util.BuilderUtils; + +@XmlRootElement(name = "jobAttempt") +@XmlAccessorType(XmlAccessType.FIELD) +public class AMAttemptInfo { + + protected String nodeHttpAddress; + protected String nodeId; + protected int id; + protected long startTime; + protected String containerId; + protected String logsLink; + + public AMAttemptInfo() { + } + + public AMAttemptInfo(AMInfo amInfo, String jobId, String user) { + + this.nodeHttpAddress = ""; + this.nodeId = ""; + String nmHost = amInfo.getNodeManagerHost(); + int nmHttpPort = amInfo.getNodeManagerHttpPort(); + int nmPort = amInfo.getNodeManagerPort(); + if (nmHost != null) { + this.nodeHttpAddress = nmHost + ":" + nmHttpPort; + NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort); + this.nodeId = nodeId.toString(); + } + + this.id = amInfo.getAppAttemptId().getAttemptId(); + this.startTime = amInfo.getStartTime(); + this.containerId = ""; + this.logsLink = ""; + ContainerId containerId = amInfo.getContainerId(); + if (containerId != null) { + this.containerId = containerId.toString(); + this.logsLink = join("http://" + nodeHttpAddress, + ujoin("node", "containerlogs", this.containerId)); + } + } + + public String getNodeHttpAddress() { + return this.nodeHttpAddress; + } + + public String getNodeId() { + return this.nodeId; + } + + public int getAttemptId() { + return this.id; + } + + public long getStartTime() { + return this.startTime; + } + + public String getContainerId() { + return 
this.containerId; + } + + public String getLogsLink() { + return this.logsLink; + } + +} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptsInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptsInfo.java new file mode 100644 index 00000000000..e647aa84b33 --- /dev/null +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AMAttemptsInfo.java @@ -0,0 +1,45 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.mapreduce.v2.app.webapp.dao; + +import java.util.ArrayList; + +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlRootElement; + +@XmlRootElement(name = "jobAttempts") +@XmlAccessorType(XmlAccessType.FIELD) +public class AMAttemptsInfo { + + @XmlElement(name = "jobAttempt") + protected ArrayList attempt = new ArrayList(); + + public AMAttemptsInfo() { + } // JAXB needs this + + public void add(AMAttemptInfo info) { + this.attempt.add(info); + } + + public ArrayList getAttempts() { + return this.attempt; + } + +} diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java index da57f86c114..a92902009a9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java @@ -214,7 +214,7 @@ public class JobInfo { return this.state.toString(); } - public String getUser() { + public String getUserName() { return this.user; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java index 605ccf534fc..40da3bc00b9 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java @@ -18,6 +18,7 @@ package org.apache.hadoop.mapreduce.v2.app.webapp; +import static org.apache.hadoop.yarn.util.StringHelper.ujoin; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -33,6 +34,7 @@ import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.JobACL; +import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobReport; import org.apache.hadoop.mapreduce.v2.app.AppContext; @@ -44,6 +46,7 @@ import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; +import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Times; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; @@ -76,6 +79,7 @@ import com.sun.jersey.test.framework.WebAppDescriptor; * /ws/v1/mapreduce/jobs * /ws/v1/mapreduce/jobs/{jobid} * /ws/v1/mapreduce/jobs/{jobid}/counters + * /ws/v1/mapreduce/jobs/{jobid}/jobattempts */ public class TestAMWebServicesJobs extends JerseyTest { @@ -777,4 +781,136 @@ public class TestAMWebServicesJobs extends JerseyTest { } } + @Test + public void testJobAttempts() throws JSONException, Exception { + WebResource r = resource(); + Map jobsMap = appContext.getAllJobs(); + for (JobId id : jobsMap.keySet()) { + String jobId = MRApps.toString(id); + 
+ ClientResponse response = r.path("ws").path("v1") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") + .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + JSONObject json = response.getEntity(JSONObject.class); + assertEquals("incorrect number of elements", 1, json.length()); + JSONObject info = json.getJSONObject("jobAttempts"); + verifyJobAttempts(info, jobsMap.get(id)); + } + } + + @Test + public void testJobAttemptsSlash() throws JSONException, Exception { + WebResource r = resource(); + Map jobsMap = appContext.getAllJobs(); + for (JobId id : jobsMap.keySet()) { + String jobId = MRApps.toString(id); + + ClientResponse response = r.path("ws").path("v1") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts/") + .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + JSONObject json = response.getEntity(JSONObject.class); + assertEquals("incorrect number of elements", 1, json.length()); + JSONObject info = json.getJSONObject("jobAttempts"); + verifyJobAttempts(info, jobsMap.get(id)); + } + } + + @Test + public void testJobAttemptsDefault() throws JSONException, Exception { + WebResource r = resource(); + Map jobsMap = appContext.getAllJobs(); + for (JobId id : jobsMap.keySet()) { + String jobId = MRApps.toString(id); + + ClientResponse response = r.path("ws").path("v1") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") + .get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); + JSONObject json = response.getEntity(JSONObject.class); + assertEquals("incorrect number of elements", 1, json.length()); + JSONObject info = json.getJSONObject("jobAttempts"); + verifyJobAttempts(info, jobsMap.get(id)); + } + } + + @Test + public void testJobAttemptsXML() throws Exception { + WebResource r = resource(); + Map jobsMap = 
appContext.getAllJobs(); + for (JobId id : jobsMap.keySet()) { + String jobId = MRApps.toString(id); + + ClientResponse response = r.path("ws").path("v1") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") + .accept(MediaType.APPLICATION_XML).get(ClientResponse.class); + assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); + String xml = response.getEntity(String.class); + DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + DocumentBuilder db = dbf.newDocumentBuilder(); + InputSource is = new InputSource(); + is.setCharacterStream(new StringReader(xml)); + Document dom = db.parse(is); + NodeList attempts = dom.getElementsByTagName("jobAttempts"); + assertEquals("incorrect number of elements", 1, attempts.getLength()); + NodeList info = dom.getElementsByTagName("jobAttempt"); + verifyJobAttemptsXML(info, jobsMap.get(id)); + } + } + + public void verifyJobAttempts(JSONObject info, Job job) + throws JSONException { + + JSONArray attempts = info.getJSONArray("jobAttempt"); + assertEquals("incorrect number of elements", 2, attempts.length()); + for (int i = 0; i < attempts.length(); i++) { + JSONObject attempt = attempts.getJSONObject(i); + verifyJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"), + attempt.getString("nodeId"), attempt.getInt("id"), + attempt.getLong("startTime"), attempt.getString("containerId"), + attempt.getString("logsLink")); + } + } + + public void verifyJobAttemptsXML(NodeList nodes, Job job) { + + assertEquals("incorrect number of elements", 2, nodes.getLength()); + for (int i = 0; i < nodes.getLength(); i++) { + Element element = (Element) nodes.item(i); + verifyJobAttemptsGeneric(job, + WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"), + WebServicesTestUtils.getXmlString(element, "nodeId"), + WebServicesTestUtils.getXmlInt(element, "id"), + WebServicesTestUtils.getXmlLong(element, "startTime"), + WebServicesTestUtils.getXmlString(element, "containerId"), + 
WebServicesTestUtils.getXmlString(element, "logsLink")); + } + } + + public void verifyJobAttemptsGeneric(Job job, String nodeHttpAddress, + String nodeId, int id, long startTime, String containerId, String logsLink) { + boolean attemptFound = false; + for (AMInfo amInfo : job.getAMInfos()) { + if (amInfo.getAppAttemptId().getAttemptId() == id) { + attemptFound = true; + String nmHost = amInfo.getNodeManagerHost(); + int nmHttpPort = amInfo.getNodeManagerHttpPort(); + int nmPort = amInfo.getNodeManagerPort(); + WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":" + + nmHttpPort, nodeHttpAddress); + WebServicesTestUtils.checkStringMatch("nodeId", + BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId); + assertTrue("startime not greater than 0", startTime > 0); + WebServicesTestUtils.checkStringMatch("containerId", amInfo + .getContainerId().toString(), containerId); + + String localLogsLink = ujoin("node", "containerlogs", containerId); + + assertTrue("logsLink", logsLink.contains(localLogsLink)); + } + } + assertTrue("attempt: " + id + " was not found", attemptFound); + } + } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java index 5153d2ee802..7f7bbcca823 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java @@ -229,7 +229,7 @@ public class HsWebServices { } @GET - @Path("/mapreduce/jobs/{jobid}/attempts") + @Path("/mapreduce/jobs/{jobid}/jobattempts") @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML }) 
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java index b91cb6e4f2c..0641cffb580 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.util.BuilderUtils; -@XmlRootElement(name = "amAttempt") +@XmlRootElement(name = "jobAttempt") @XmlAccessorType(XmlAccessType.FIELD) public class AMAttemptInfo { @@ -52,12 +52,14 @@ public class AMAttemptInfo { this.nodeHttpAddress = ""; this.nodeId = ""; String nmHost = amInfo.getNodeManagerHost(); - int nmPort = amInfo.getNodeManagerHttpPort(); + int nmHttpPort = amInfo.getNodeManagerHttpPort(); + int nmPort = amInfo.getNodeManagerPort(); if (nmHost != null) { - this.nodeHttpAddress = nmHost + ":" + nmPort; + this.nodeHttpAddress = nmHost + ":" + nmHttpPort; NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort); this.nodeId = nodeId.toString(); } + this.id = amInfo.getAppAttemptId().getAttemptId(); this.startTime = amInfo.getStartTime(); this.containerId = ""; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java index ee092b8c440..1bce065f66d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java @@ -21,12 +21,14 @@ import java.util.ArrayList; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; -@XmlRootElement(name = "attempts") +@XmlRootElement(name = "jobAttempts") @XmlAccessorType(XmlAccessType.FIELD) public class AMAttemptsInfo { + @XmlElement(name = "jobAttempt") protected ArrayList attempt = new ArrayList(); public AMAttemptsInfo() { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java index 8c9b0603b85..79c4daf5a3e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java @@ -77,7 +77,7 @@ import com.sun.jersey.test.framework.WebAppDescriptor; * * /ws/v1/history/mapreduce/jobs /ws/v1/history/mapreduce/jobs/{jobid} * /ws/v1/history/mapreduce/jobs/{jobid}/counters - * /ws/v1/history/mapreduce/jobs/{jobid}/attempts + * 
/ws/v1/history/mapreduce/jobs/{jobid}/jobattempts */ public class TestHsWebServicesJobs extends JerseyTest { @@ -626,12 +626,12 @@ public class TestHsWebServicesJobs extends JerseyTest { String jobId = MRApps.toString(id); ClientResponse response = r.path("ws").path("v1").path("history") - .path("mapreduce").path("jobs").path(jobId).path("attempts") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); - JSONObject info = json.getJSONObject("attempts"); + JSONObject info = json.getJSONObject("jobAttempts"); verifyHsJobAttempts(info, jobsMap.get(id)); } } @@ -644,12 +644,12 @@ public class TestHsWebServicesJobs extends JerseyTest { String jobId = MRApps.toString(id); ClientResponse response = r.path("ws").path("v1").path("history") - .path("mapreduce").path("jobs").path(jobId).path("attempts/") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts/") .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); assertEquals("incorrect number of elements", 1, json.length()); - JSONObject info = json.getJSONObject("attempts"); + JSONObject info = json.getJSONObject("jobAttempts"); verifyHsJobAttempts(info, jobsMap.get(id)); } } @@ -662,12 +662,12 @@ public class TestHsWebServicesJobs extends JerseyTest { String jobId = MRApps.toString(id); ClientResponse response = r.path("ws").path("v1").path("history") - .path("mapreduce").path("jobs").path(jobId).path("attempts") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") .get(ClientResponse.class); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); JSONObject json = response.getEntity(JSONObject.class); 
assertEquals("incorrect number of elements", 1, json.length()); - JSONObject info = json.getJSONObject("attempts"); + JSONObject info = json.getJSONObject("jobAttempts"); verifyHsJobAttempts(info, jobsMap.get(id)); } } @@ -680,7 +680,7 @@ public class TestHsWebServicesJobs extends JerseyTest { String jobId = MRApps.toString(id); ClientResponse response = r.path("ws").path("v1").path("history") - .path("mapreduce").path("jobs").path(jobId).path("attempts") + .path("mapreduce").path("jobs").path(jobId).path("jobattempts") .accept(MediaType.APPLICATION_XML).get(ClientResponse.class); assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); String xml = response.getEntity(String.class); @@ -689,9 +689,9 @@ public class TestHsWebServicesJobs extends JerseyTest { InputSource is = new InputSource(); is.setCharacterStream(new StringReader(xml)); Document dom = db.parse(is); - NodeList attempts = dom.getElementsByTagName("attempts"); + NodeList attempts = dom.getElementsByTagName("jobAttempts"); assertEquals("incorrect number of elements", 1, attempts.getLength()); - NodeList info = dom.getElementsByTagName("attempt"); + NodeList info = dom.getElementsByTagName("jobAttempt"); verifyHsJobAttemptsXML(info, jobsMap.get(id)); } } @@ -699,7 +699,7 @@ public class TestHsWebServicesJobs extends JerseyTest { public void verifyHsJobAttempts(JSONObject info, Job job) throws JSONException { - JSONArray attempts = info.getJSONArray("attempt"); + JSONArray attempts = info.getJSONArray("jobAttempt"); assertEquals("incorrect number of elements", 2, attempts.length()); for (int i = 0; i < attempts.length(); i++) { JSONObject attempt = attempts.getJSONObject(i); @@ -732,9 +732,10 @@ public class TestHsWebServicesJobs extends JerseyTest { if (amInfo.getAppAttemptId().getAttemptId() == id) { attemptFound = true; String nmHost = amInfo.getNodeManagerHost(); - int nmPort = amInfo.getNodeManagerHttpPort(); + int nmHttpPort = amInfo.getNodeManagerHttpPort(); + int nmPort = 
amInfo.getNodeManagerPort(); WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":" - + nmPort, nodeHttpAddress); + + nmHttpPort, nodeHttpAddress); WebServicesTestUtils.checkStringMatch("nodeId", BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId); assertTrue("startime not greater than 0", startTime > 0);