diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index dcdfc1de757..0c2f73c3c10 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -47,6 +47,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5692. Add explicit diagnostics when a task attempt is killed due
     to speculative execution (Gera Shegalov via Sandy Ryza)

+    MAPREDUCE-5550. Task Status message (reporter.setStatus) not shown in UI
+    with Hadoop 2.0 (Gera Shegalov via Sandy Ryza)
+
   OPTIMIZATIONS

     MAPREDUCE-5484. YarnChild unnecessarily loads job conf twice (Sandy Ryza)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
index efb46d538b4..9d14d4e2b6a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
@@ -376,11 +376,15 @@ public abstract class TaskImpl implements Task, EventHandler {
     TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
     readLock.lock();
     try {
+      TaskAttempt bestAttempt = selectBestAttempt();
       report.setTaskId(taskId);
       report.setStartTime(getLaunchTime());
       report.setFinishTime(getFinishTime());
       report.setTaskState(getState());
-      report.setProgress(getProgress());
+      report.setProgress(bestAttempt == null ? 0f : bestAttempt.getProgress());
+      report.setStatus(bestAttempt == null
+          ? ""
+          : bestAttempt.getReport().getStateString());

       for (TaskAttempt attempt : attempts.values()) {
         if (TaskAttemptState.RUNNING.equals(attempt.getState())) {
@@ -400,7 +404,9 @@ public abstract class TaskImpl implements Task, EventHandler {

       // Add a copy of counters as the last step so that their lifetime on heap
       // is as small as possible.
-      report.setCounters(TypeConverter.toYarn(getCounters()));
+      report.setCounters(TypeConverter.toYarn(bestAttempt == null
+          ? TaskAttemptImpl.EMPTY_COUNTERS
+          : bestAttempt.getCounters()));

       return report;
     } finally {
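A minimal caller-side sketch of what the TaskImpl change above exposes; `task` is a placeholder for any org.apache.hadoop.mapreduce.v2.app.job.Task, and the fallback comments only restate what the hunk encodes:

```java
// Illustrative only -- not part of the patch. After the TaskImpl change, a
// task report also mirrors the best attempt's progress and state string,
// falling back to 0f / "" when no attempt exists yet.
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.job.Task;

public class TaskReportPeek {
  static void peek(Task task) {
    TaskReport report = task.getReport();
    float progress = report.getProgress(); // best attempt's progress, or 0f
    String status = report.getStatus();    // best attempt's state string, or ""
    System.out.println(progress + " " + status);
  }
}
```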
"N/A" : "" @@ -144,13 +146,13 @@ public class TaskPage extends AppView { .append("\n,aoColumnDefs:[\n") //logs column should not filterable (it includes container ID which may pollute searches) - .append("\n{'aTargets': [ 4 ]") + .append("\n{'aTargets': [ 5 ]") .append(", 'bSearchable': false }") - .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6") + .append("\n, {'sType':'numeric', 'aTargets': [ 6, 7") .append(" ], 'mRender': renderHadoopDate }") - .append("\n, {'sType':'numeric', 'aTargets': [ 7") + .append("\n, {'sType':'numeric', 'aTargets': [ 8") .append(" ], 'mRender': renderHadoopElapsedTime }]") // Sort by id upon page load diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java index dc6bbb24d48..e2c65e320b8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java @@ -59,6 +59,7 @@ public class TasksBlock extends HtmlBlock { tr(). th("Task"). th("Progress"). + th("Status"). th("State"). th("Start Time"). th("Finish Time"). @@ -81,6 +82,7 @@ public class TasksBlock extends HtmlBlock { .append(join(pct, '%')).append("'> ").append("
\",\"") + .append(info.getStatus()).append("\",\"") .append(info.getState()).append("\",\"") .append(info.getStartTime()).append("\",\"") diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java index 3753b1ea39d..0212ae4f741 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksPage.java @@ -50,10 +50,10 @@ public class TasksPage extends AppView { .append(", 'mRender': parseHadoopProgress }") - .append("\n, {'sType':'numeric', 'aTargets': [3, 4]") + .append("\n, {'sType':'numeric', 'aTargets': [4, 5]") .append(", 'mRender': renderHadoopDate }") - .append("\n, {'sType':'numeric', 'aTargets': [5]") + .append("\n, {'sType':'numeric', 'aTargets': [6]") .append(", 'mRender': renderHadoopElapsedTime }]") // Sort by id upon page load diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java index c35411b0a2e..d8e89b1cbc9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java @@ -25,6 +25,7 @@ import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.bind.annotation.XmlTransient; +import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; @@ -45,6 +46,7 @@ public class TaskAttemptInfo { protected String id; protected String rack; protected TaskAttemptState state; + protected String status; protected String nodeHttpAddress; protected String diagnostics; protected String type; @@ -61,29 +63,23 @@ public class TaskAttemptInfo { } public TaskAttemptInfo(TaskAttempt ta, TaskType type, Boolean isRunning) { + final TaskAttemptReport report = ta.getReport(); this.type = type.toString(); this.id = MRApps.toString(ta.getID()); this.nodeHttpAddress = ta.getNodeHttpAddress(); - this.startTime = ta.getLaunchTime(); - this.finishTime = ta.getFinishTime(); - this.assignedContainerId = ConverterUtils.toString(ta - .getAssignedContainerID()); - this.assignedContainer = ta.getAssignedContainerID(); - this.progress = ta.getProgress() * 100; - this.state = ta.getState(); + this.startTime = report.getStartTime(); + this.finishTime = report.getFinishTime(); + this.assignedContainerId = ConverterUtils.toString(report.getContainerId()); + this.assignedContainer = report.getContainerId(); + this.progress = report.getProgress() * 100; + this.status = report.getStateString(); + this.state = report.getTaskAttemptState(); this.elapsedTime = 
Times .elapsed(this.startTime, this.finishTime, isRunning); if (this.elapsedTime == -1) { this.elapsedTime = 0; } - List diagnostics = ta.getDiagnostics(); - if (diagnostics != null && !diagnostics.isEmpty()) { - StringBuffer b = new StringBuffer(); - for (String diag : diagnostics) { - b.append(diag); - } - this.diagnostics = b.toString(); - } + this.diagnostics = report.getDiagnosticInfo(); this.rack = ta.getNodeRackName(); } @@ -99,6 +95,10 @@ public class TaskAttemptInfo { return this.state.toString(); } + public String getStatus() { + return status; + } + public String getId() { return this.id; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java index 40983400aef..00305a83e40 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java @@ -43,6 +43,7 @@ public class TaskInfo { protected TaskState state; protected String type; protected String successfulAttempt; + protected String status; @XmlTransient int taskNum; @@ -66,6 +67,7 @@ public class TaskInfo { this.elapsedTime = 0; } this.progress = report.getProgress() * 100; + this.status = report.getStatus(); this.id = MRApps.toString(task.getID()); this.taskNum = task.getID().getId(); this.successful = getSuccessfulAttempt(task); @@ -121,4 +123,7 @@ public class TaskInfo { return null; } + public String getStatus() { + return status; + } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java index c018096c358..eb4919c9cde 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java @@ -174,22 +174,37 @@ public class MockJobs extends MockApps { report.setFinishTime(System.currentTimeMillis() + (int) (Math.random() * DT) + 1); report.setProgress((float) Math.random()); + report.setStatus("Moving average: " + Math.random()); report.setCounters(TypeConverter.toYarn(newCounters())); report.setTaskState(TASK_STATES.next()); return report; } public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) { + ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance( + id.getTaskId().getJobId().getAppId(), 0); + ContainerId containerId = ContainerId.newInstance(appAttemptId, 0); TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class); report.setTaskAttemptId(id); report .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT)); report.setFinishTime(System.currentTimeMillis() + (int) (Math.random() * DT) + 1); + + if (id.getTaskId().getTaskType() == TaskType.REDUCE) { + report.setShuffleFinishTime( + (report.getFinishTime() + report.getStartTime()) / 2); + report.setSortFinishTime( + (report.getFinishTime() + 
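A rough usage sketch of the DAO changes above, assuming only the constructor and getters visible in this patch; `attempt` stands in for any TaskAttempt implementation:

```java
// Illustrative only -- mirrors how the web layer reads the DAO after this change.
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;

public class TaskAttemptInfoSketch {
  static String statusCell(TaskAttempt attempt) {
    TaskAttemptInfo info = new TaskAttemptInfo(attempt, TaskType.MAP, false);
    // These now come from attempt.getReport() rather than the live attempt:
    long started = info.getStartTime();   // report.getStartTime()
    long finished = info.getFinishTime(); // report.getFinishTime()
    return info.getStatus() + " [" + started + ".." + finished + "]";
  }
}
```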
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
index c018096c358..eb4919c9cde 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
@@ -174,22 +174,37 @@ public class MockJobs extends MockApps {
     report.setFinishTime(System.currentTimeMillis()
         + (int) (Math.random() * DT) + 1);
     report.setProgress((float) Math.random());
+    report.setStatus("Moving average: " + Math.random());
     report.setCounters(TypeConverter.toYarn(newCounters()));
     report.setTaskState(TASK_STATES.next());
     return report;
   }

   public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
+    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
+        id.getTaskId().getJobId().getAppId(), 0);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 0);
     TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
     report.setTaskAttemptId(id);
     report
         .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
     report.setFinishTime(System.currentTimeMillis()
         + (int) (Math.random() * DT) + 1);
+
+    if (id.getTaskId().getTaskType() == TaskType.REDUCE) {
+      report.setShuffleFinishTime(
+          (report.getFinishTime() + report.getStartTime()) / 2);
+      report.setSortFinishTime(
+          (report.getFinishTime() + report.getShuffleFinishTime()) / 2);
+    }
+
     report.setPhase(PHASES.next());
     report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
     report.setProgress((float) Math.random());
     report.setCounters(TypeConverter.toYarn(newCounters()));
+    report.setContainerId(containerId);
+    report.setDiagnosticInfo(DIAGS.next());
+    report.setStateString("Moving average " + Math.random());
     return report;
   }

@@ -230,8 +245,6 @@ public class MockJobs extends MockApps {
       taid.setTaskId(tid);
       taid.setId(i);
       final TaskAttemptReport report = newTaskAttemptReport(taid);
-      final List diags = Lists.newArrayList();
-      diags.add(DIAGS.next());
       return new TaskAttempt() {
         @Override
         public NodeId getNodeId() throws UnsupportedOperationException{
@@ -250,12 +263,12 @@ public class MockJobs extends MockApps {

         @Override
         public long getLaunchTime() {
-          return 0;
+          return report.getStartTime();
         }

         @Override
         public long getFinishTime() {
-          return 0;
+          return report.getFinishTime();
         }

         @Override
@@ -313,7 +326,7 @@ public class MockJobs extends MockApps {

         @Override
         public List getDiagnostics() {
-          return diags;
+          return Lists.newArrayList(report.getDiagnosticInfo());
         }

         @Override
@@ -323,12 +336,12 @@ public class MockJobs extends MockApps {

         @Override
         public long getShuffleFinishTime() {
-          return 0;
+          return report.getShuffleFinishTime();
         }

         @Override
         public long getSortFinishTime() {
-          return 0;
+          return report.getSortFinishTime();
         }

         @Override
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
index 886270bf1bb..dcd5d2954ba 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
@@ -425,9 +425,9 @@ public class TestAMWebServicesAttempts extends JerseyTest {
   public void verifyAMTaskAttempt(JSONObject info, TaskAttempt att,
       TaskType ttype) throws JSONException {
     if (ttype == TaskType.REDUCE) {
-      assertEquals("incorrect number of elements", 16, info.length());
+      assertEquals("incorrect number of elements", 17, info.length());
     } else {
-      assertEquals("incorrect number of elements", 11, info.length());
+      assertEquals("incorrect number of elements", 12, info.length());
     }

     verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
@@ -532,11 +532,11 @@ public class TestAMWebServicesAttempts extends JerseyTest {
     assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
         mergeFinishTime);
     assertEquals("elapsedShuffleTime wrong",
-        ta.getLaunchTime() - ta.getShuffleFinishTime(), elapsedShuffleTime);
+        ta.getShuffleFinishTime() - ta.getLaunchTime(), elapsedShuffleTime);
     assertEquals("elapsedMergeTime wrong",
-        ta.getShuffleFinishTime() - ta.getSortFinishTime(), elapsedMergeTime);
+        ta.getSortFinishTime() - ta.getShuffleFinishTime(), elapsedMergeTime);
     assertEquals("elapsedReduceTime wrong",
-        ta.getSortFinishTime() - ta.getFinishTime(), elapsedReduceTime);
+        ta.getFinishTime() - ta.getSortFinishTime(), elapsedReduceTime);
   }

   @Test
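The three assertion fixes above flip subtractions that were reversed. With the timestamps the mocked attempt in TestBlocks (later in this patch) uses, the corrected arithmetic works out as follows; the values are illustrative only:

```java
// Illustrative arithmetic only; timestamps match the TestBlocks mock below
// (launch 100002, shuffle 100010, sort 100011, finish 100012).
public class ElapsedTimeExample {
  public static void main(String[] args) {
    long launchTime = 100002L;
    long shuffleFinishTime = 100010L;
    long sortFinishTime = 100011L;
    long finishTime = 100012L;

    long elapsedShuffleTime = shuffleFinishTime - launchTime;    // 8, not -8
    long elapsedMergeTime = sortFinishTime - shuffleFinishTime;  // 1, not -1
    long elapsedReduceTime = finishTime - sortFinishTime;        // 1, not -1
    System.out.println(elapsedShuffleTime + " " + elapsedMergeTime + " "
        + elapsedReduceTime);
  }
}
```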
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
index b8ece70f323..8bf1bb7b752 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
@@ -525,12 +525,13 @@ public class TestAMWebServicesTasks extends JerseyTest {

   public void verifyAMSingleTask(JSONObject info, Task task)
       throws JSONException {
-    assertEquals("incorrect number of elements", 8, info.length());
+    assertEquals("incorrect number of elements", 9, info.length());

     verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
         info.getString("type"), info.getString("successfulAttempt"),
         info.getLong("startTime"), info.getLong("finishTime"),
-        info.getLong("elapsedTime"), (float) info.getDouble("progress"));
+        info.getLong("elapsedTime"), (float) info.getDouble("progress"),
+        info.getString("status"));
   }

   public void verifyAMTask(JSONArray arr, Job job, String type)
@@ -555,7 +556,7 @@ public class TestAMWebServicesTasks extends JerseyTest {

   public void verifyTaskGeneric(Task task, String id, String state,
       String type, String successfulAttempt, long startTime, long finishTime,
-      long elapsedTime, float progress) {
+      long elapsedTime, float progress, String status) {

     TaskId taskid = task.getID();
     String tid = MRApps.toString(taskid);
@@ -572,6 +573,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
     assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
     assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
     assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
+    assertEquals("status wrong", report.getStatus(), status);
   }

   public void verifyAMSingleTaskXML(Element element, Task task) {
@@ -582,7 +584,8 @@ public class TestAMWebServicesTasks extends JerseyTest {
         WebServicesTestUtils.getXmlLong(element, "startTime"),
         WebServicesTestUtils.getXmlLong(element, "finishTime"),
         WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
-        WebServicesTestUtils.getXmlFloat(element, "progress"));
+        WebServicesTestUtils.getXmlFloat(element, "progress"),
+        WebServicesTestUtils.getXmlString(element, "status"));
   }

   public void verifyAMTaskXML(NodeList nodes, Job job) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskReport.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskReport.java
index 66cca13f5d1..1444a53f3ee 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskReport.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/TaskReport.java
@@ -24,10 +24,10 @@ public interface TaskReport {
   public abstract TaskId getTaskId();
   public abstract TaskState getTaskState();
   public abstract float getProgress();
+  public abstract String getStatus();
   public abstract long getStartTime();
   public abstract long getFinishTime();
   public abstract Counters getCounters();
-
   public abstract List getRunningAttemptsList();
   public abstract TaskAttemptId getRunningAttempt(int index);
   public abstract int getRunningAttemptsCount();
@@ -42,6 +42,7 @@ public interface TaskReport {
   public abstract void setTaskId(TaskId taskId);
   public abstract void setTaskState(TaskState taskState);
   public abstract void setProgress(float progress);
+  public abstract void setStatus(String status);
   public abstract void setStartTime(long startTime);
   public abstract void setFinishTime(long finishTime);
   public abstract void setCounters(Counters counters);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
index 9801a16f39c..ba1245c2e3d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/records/impl/pb/TaskReportPBImpl.java
@@ -49,6 +49,7 @@ public class TaskReportPBImpl extends ProtoBase implements Task
   private List runningAttempts = null;
   private TaskAttemptId successfulAttemptId = null;
   private List diagnostics = null;
+  private String status;

   public TaskReportPBImpl() {
@@ -171,11 +172,22 @@ public class TaskReportPBImpl extends ProtoBase implements Task
     return (p.getProgress());
   }

+  @Override
+  public String getStatus() {
+    return status;
+  }
+
   @Override
   public void setProgress(float progress) {
     maybeInitBuilder();
     builder.setProgress((progress));
   }
+
+  @Override
+  public void setStatus(String status) {
+    this.status = status;
+  }
+
   @Override
   public TaskState getTaskState() {
     TaskReportProtoOrBuilder p = viaProto ? proto : builder;
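A small sketch of the accessor pair added to TaskReport above. Note that, as the PBImpl hunk shows, `status` is held only as a plain Java field and does not round-trip through the protobuf builder the way `progress` does. `Records.newRecord` is the same factory MockJobs uses elsewhere in this patch; the status string is invented for illustration:

```java
// Illustrative only -- exercises the accessor pair added to TaskReport above.
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.yarn.util.Records;

public class TaskReportStatusSketch {
  public static void main(String[] args) {
    TaskReport report = Records.newRecord(TaskReport.class);
    report.setStatus("reduce > reduce");  // typically an attempt's state string
    // Surfaced as the "status" field in the task web-service responses and as
    // the "Status" column in the UI; kept only in the Java object, not the proto.
    System.out.println(report.getStatus());
  }
}
```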
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
index 4d8a8cfc7b1..fde3a3a03f5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
 import org.apache.hadoop.yarn.util.Times;
@@ -89,6 +90,7 @@ public class HsTaskPage extends HsView {
       headRow.
             th(".id", "Attempt").
             th(".state", "State").
+            th(".status", "Status").
             th(".node", "Node").
             th(".logs", "Logs").
             th(".tsh", "Start Time");
@@ -113,15 +115,16 @@ public class HsTaskPage extends HsView {
       // DataTables to display
       StringBuilder attemptsTableData = new StringBuilder("[\n");

-      for (TaskAttempt ta : getTaskAttempts()) {
-        String taid = MRApps.toString(ta.getID());
+      for (TaskAttempt attempt : getTaskAttempts()) {
+        final TaskAttemptInfo ta = new TaskAttemptInfo(attempt, false);
+        String taid = ta.getId();

-        String nodeHttpAddr = ta.getNodeHttpAddress();
-        String containerIdString = ta.getAssignedContainerID().toString();
-        String nodeIdString = ta.getAssignedContainerMgrAddress();
-        String nodeRackName = ta.getNodeRackName();
+        String nodeHttpAddr = ta.getNode();
+        String containerIdString = ta.getAssignedContainerIdStr();
+        String nodeIdString = attempt.getAssignedContainerMgrAddress();
+        String nodeRackName = ta.getRack();

-        long attemptStartTime = ta.getLaunchTime();
+        long attemptStartTime = ta.getStartTime();
         long shuffleFinishTime = -1;
         long sortFinishTime = -1;
         long attemptFinishTime = ta.getFinishTime();
@@ -129,8 +132,8 @@ public class HsTaskPage extends HsView {
         long elapsedSortTime = -1;
         long elapsedReduceTime = -1;
         if(type == TaskType.REDUCE) {
-          shuffleFinishTime = ta.getShuffleFinishTime();
-          sortFinishTime = ta.getSortFinishTime();
+          shuffleFinishTime = attempt.getShuffleFinishTime();
+          sortFinishTime = attempt.getSortFinishTime();
           elapsedShuffleTime =
               Times.elapsed(attemptStartTime, shuffleFinishTime, false);
           elapsedSortTime =
@@ -140,11 +143,13 @@ public class HsTaskPage extends HsView {
         }
         long attemptElapsed =
             Times.elapsed(attemptStartTime, attemptFinishTime, false);
-        int sortId = ta.getID().getId() + (ta.getID().getTaskId().getId() * 10000);
+        int sortId = attempt.getID().getId()
+            + (attempt.getID().getTaskId().getId() * 10000);

         attemptsTableData.append("[\"")
         .append(sortId + " ").append(taid).append("\",\"")
-        .append(ta.getState().toString()).append("\",\"")
+        .append(ta.getState()).append("\",\"")
+        .append(ta.getStatus()).append("\",\"")
         .append("")
         .append(nodeRackName + "/" + nodeHttpAddr + "\",\"")
@@ -167,8 +172,9 @@ public class HsTaskPage extends HsView {
           .append(elapsedReduceTime).append("\",\"");
         }
         attemptsTableData.append(attemptElapsed).append("\",\"")
-          .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
-              Joiner.on('\n').join(ta.getDiagnostics())))).append("\"],\n");
+          .append(StringEscapeUtils.escapeJavaScript(
+              StringEscapeUtils.escapeHtml(ta.getNote())))
+          .append("\"],\n");
       }
       //Remove the last comma and close off the array of arrays
       if(attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
@@ -184,6 +190,8 @@ public class HsTaskPage extends HsView {
             $name("attempt_name").$value("Attempt")._()._().
           th().input("search_init").$type(InputType.text).
             $name("attempt_state").$value("State")._()._().
+          th().input("search_init").$type(InputType.text).
+            $name("attempt_status").$value("Status")._()._().
           th().input("search_init").$type(InputType.text).
             $name("attempt_node").$value("Node")._()._().
           th().input("search_init").$type(InputType.text).
@@ -283,19 +291,19 @@ public class HsTaskPage extends HsView {
       .append("\n,aoColumnDefs:[\n")

      //logs column should not filterable (it includes container ID which may pollute searches)
-      .append("\n{'aTargets': [ 3 ]")
+      .append("\n{'aTargets': [ 4 ]")
      .append(", 'bSearchable': false }")

      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
      .append(", 'mRender': parseHadoopAttemptID }")

-      .append("\n, {'sType':'numeric', 'aTargets': [ 4, 5")
+      .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
      //Column numbers are different for maps and reduces
-      .append(type == TaskType.REDUCE ? ", 6, 7" : "")
+      .append(type == TaskType.REDUCE ? ", 7, 8" : "")
      .append(" ], 'mRender': renderHadoopDate }")

      .append("\n, {'sType':'numeric', 'aTargets': [")
-      .append(type == TaskType.REDUCE ? "8, 9, 10, 11" : "6")
+      .append(type == TaskType.REDUCE ? "9, 10, 11, 12" : "7")
      .append(" ], 'mRender': renderHadoopElapsedTime }]")

      // Sort by id upon page load
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
index 1d22c313a71..241bdb246fd 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
@@ -138,11 +139,31 @@ public class TestBlocks {
     when(attempt.getAssignedContainerMgrAddress()).thenReturn(
         "assignedContainerMgrAddress");
     when(attempt.getNodeRackName()).thenReturn("nodeRackName");
-    when(attempt.getLaunchTime()).thenReturn(100002L);
-    when(attempt.getFinishTime()).thenReturn(100012L);
-    when(attempt.getShuffleFinishTime()).thenReturn(100010L);
-    when(attempt.getSortFinishTime()).thenReturn(100011L);
-    when(attempt.getState()).thenReturn(TaskAttemptState.SUCCEEDED);
+
+    final long taStartTime = 100002L;
+    final long taFinishTime = 100012L;
+    final long taShuffleFinishTime = 100010L;
+    final long taSortFinishTime = 100011L;
+    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
+
+    when(attempt.getLaunchTime()).thenReturn(taStartTime);
+    when(attempt.getFinishTime()).thenReturn(taFinishTime);
+    when(attempt.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
+    when(attempt.getSortFinishTime()).thenReturn(taSortFinishTime);
+    when(attempt.getState()).thenReturn(taState);
+
+    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
+    when(taReport.getStartTime()).thenReturn(taStartTime);
+    when(taReport.getFinishTime()).thenReturn(taFinishTime);
+    when(taReport.getShuffleFinishTime()).thenReturn(taShuffleFinishTime);
+    when(taReport.getSortFinishTime()).thenReturn(taSortFinishTime);
+    when(taReport.getContainerId()).thenReturn(containerId);
+    when(taReport.getProgress()).thenReturn(1.0f);
+    when(taReport.getStateString()).thenReturn("Processed 128/128 records");
+    when(taReport.getTaskAttemptState()).thenReturn(taState);
+    when(taReport.getDiagnosticInfo()).thenReturn("");
+
+    when(attempt.getReport()).thenReturn(taReport);

     attempts.put(taId, attempt);
     when(task.getAttempts()).thenReturn(attempts);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
index 8a5a6db88a3..60dc235d684 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
@@ -444,9 +444,9 @@ public class TestHsWebServicesAttempts extends JerseyTest {
   public void verifyHsTaskAttempt(JSONObject info, TaskAttempt att,
       TaskType ttype) throws JSONException {
     if (ttype == TaskType.REDUCE) {
-      assertEquals("incorrect number of elements", 16, info.length());
+      assertEquals("incorrect number of elements", 17, info.length());
     } else {
-      assertEquals("incorrect number of elements", 11, info.length());
+      assertEquals("incorrect number of elements", 12, info.length());
     }

     verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
@@ -551,11 +551,11 @@ public class TestHsWebServicesAttempts extends JerseyTest {
     assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
         mergeFinishTime);
     assertEquals("elapsedShuffleTime wrong",
-        ta.getLaunchTime() - ta.getShuffleFinishTime(), elapsedShuffleTime);
+        ta.getShuffleFinishTime() - ta.getLaunchTime(), elapsedShuffleTime);
     assertEquals("elapsedMergeTime wrong",
-        ta.getShuffleFinishTime() - ta.getSortFinishTime(), elapsedMergeTime);
+        ta.getSortFinishTime() - ta.getShuffleFinishTime(), elapsedMergeTime);
     assertEquals("elapsedReduceTime wrong",
-        ta.getSortFinishTime() - ta.getFinishTime(), elapsedReduceTime);
+        ta.getFinishTime() - ta.getSortFinishTime(), elapsedReduceTime);
   }

   @Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
index e76f37c24c0..ee0ccc6afcf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
@@ -538,7 +538,7 @@ public class TestHsWebServicesTasks extends JerseyTest {

   public void verifyHsSingleTask(JSONObject info, Task task)
       throws JSONException {
-    assertEquals("incorrect number of elements", 8, info.length());
+    assertEquals("incorrect number of elements", 9, info.length());

     verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
         info.getString("type"), info.getString("successfulAttempt"),
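For context, a minimal sketch of the user-facing call whose text this patch surfaces (the reporter.setStatus string named in the CHANGES.txt entry); the mapper class and its message are invented for illustration:

```java
// Illustrative only. The status string set here is what now shows up in the
// new "Status" column and in the task/attempt web-service responses.
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class StatusReportingMapper
    extends Mapper<LongWritable, Text, Text, LongWritable> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // New-API equivalent of the old reporter.setStatus(...) call.
    context.setStatus("Processing offset " + key.get());
    context.write(value, key);
  }
}
```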