svn merge -c 1445448 FIXES: MAPREDUCE-4989. JSONify DataTables input data for Attempts page. Contributed by Ravi Prakash
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1445450 13f79535-47bb-0310-9956-ffa450edef68
parent a6925b3fdd
commit a6f31c1d91
CHANGES.txt
@@ -550,6 +550,9 @@ Release 0.23.7 - UNRELEASED
     MAPREDUCE-4905. test org.apache.hadoop.mapred.pipes
     (Aleksey Gorshkov via bobby)
 
+    MAPREDUCE-4989. JSONify DataTables input data for Attempts page (Ravi
+    Prakash via jlowe)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4946. Fix a performance problem for large jobs by reducing the
TaskPage.java
@@ -27,18 +27,11 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
 import java.util.Collection;
 
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
 import com.google.inject.Inject;
@@ -60,7 +53,7 @@ public class TaskPage extends AppView {
           h2($(TITLE));
         return;
       }
-      TBODY<TABLE<Hamlet>> tbody = html.
+      html.
       table("#attempts").
         thead().
           tr().
@@ -72,49 +65,46 @@ public class TaskPage extends AppView {
             th(".tsh", "Started").
             th(".tsh", "Finished").
             th(".tsh", "Elapsed").
-            th(".note", "Note")._()._().
-        tbody();
+            th(".note", "Note")._()._();
+      // Write all the data into a JavaScript array of arrays for JQuery
+      // DataTables to display
+      StringBuilder attemptsTableData = new StringBuilder("[\n");
+
       for (TaskAttempt attempt : getTaskAttempts()) {
         TaskAttemptInfo ta = new TaskAttemptInfo(attempt, true);
-        String taid = ta.getId();
         String progress = percent(ta.getProgress() / 100);
-        ContainerId containerId = ta.getAssignedContainerId();
 
         String nodeHttpAddr = ta.getNode();
-        long startTime = ta.getStartTime();
-        long finishTime = ta.getFinishTime();
-        long elapsed = ta.getElapsedTime();
         String diag = ta.getNote() == null ? "" : ta.getNote();
-        TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
-        TD<TR<TBODY<TABLE<Hamlet>>>> nodeTd = row.
-          td(".id", taid).
-          td(".progress", progress).
-          td(".state", ta.getState()).td();
-        if (nodeHttpAddr == null) {
-          nodeTd._("N/A");
-        } else {
-          nodeTd.
-            a(".nodelink", url(HttpConfig.getSchemePrefix(),
-                               nodeHttpAddr), nodeHttpAddr);
-        }
-        nodeTd._();
-        if (containerId != null) {
-          String containerIdStr = ta.getAssignedContainerIdStr();
-          row.td().
-            a(".logslink", url(HttpConfig.getSchemePrefix(),
-              nodeHttpAddr, "node", "containerlogs",
-              containerIdStr, app.getJob().getUserName()), "logs")._();
-        } else {
-          row.td()._("N/A")._();
-        }
+        attemptsTableData.append("[\"")
+        .append(ta.getId()).append("\",\"")
+        .append(progress).append("\",\"")
+        .append(ta.getState().toString()).append("\",\"")
 
-        row.
-          td(".ts", Times.format(startTime)).
-          td(".ts", Times.format(finishTime)).
-          td(".dt", StringUtils.formatTime(elapsed)).
-          td(".note", diag)._();
+        .append(nodeHttpAddr == null ? "N/A" :
+          "<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>"
+          + nodeHttpAddr + "</a>")
+        .append("\",\"")
+
+        .append(ta.getAssignedContainerId() == null ? "N/A" :
+          "<a class='logslink' href='" + url(HttpConfig.getSchemePrefix(), nodeHttpAddr, "node"
+          , "containerlogs", ta.getAssignedContainerIdStr(), app.getJob()
+          .getUserName()) + "'>logs</a>")
+        .append("\",\"")
+
+        .append(ta.getStartTime()).append("\",\"")
+        .append(ta.getFinishTime()).append("\",\"")
+        .append(ta.getElapsedTime()).append("\",\"")
+        .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+          diag))).append("\"],\n");
       }
-      tbody._()._();
+      //Remove the last comma and close off the array of arrays
+      if(attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
+        attemptsTableData.delete(attemptsTableData.length()-2, attemptsTableData.length()-1);
+      }
+      attemptsTableData.append("]");
+      html.script().$type("text/javascript").
+        _("var attemptsTableData=" + attemptsTableData)._();
     }
 
     protected boolean isValidRequest() {
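In short, the hunk above replaces per-attempt Hamlet row markup with a server-built JavaScript array-of-arrays that the page assigns to attemptsTableData and feeds to DataTables; raw millisecond values go into the array unformatted. A minimal, self-contained sketch of that pattern follows, assuming commons-lang 2.x on the classpath as the patch's imports suggest; the Row class and method names are illustrative, not part of the patch.

import java.util.List;

import org.apache.commons.lang.StringEscapeUtils;

public class AttemptsDataSketch {
  /** Stand-in for TaskAttemptInfo; only the fields the sketch needs. */
  static class Row {
    final String id;
    final String state;
    final String note;
    Row(String id, String state, String note) {
      this.id = id; this.state = state; this.note = note;
    }
  }

  /** Builds something like [\n["attempt_x","RUNNING","..."],\n...] for DataTables 'aaData'. */
  static String toJsArray(List<Row> rows) {
    StringBuilder data = new StringBuilder("[\n");
    for (Row r : rows) {
      data.append("[\"")
          .append(r.id).append("\",\"")
          .append(r.state).append("\",\"")
          // Escape HTML first, then JavaScript, so free-form text cannot
          // break out of the quoted cell or inject markup into the page.
          .append(StringEscapeUtils.escapeJavaScript(
              StringEscapeUtils.escapeHtml(r.note == null ? "" : r.note)))
          .append("\"],\n");
    }
    // Drop the trailing comma left after the last row, then close the array.
    if (data.charAt(data.length() - 2) == ',') {
      data.delete(data.length() - 2, data.length() - 1);
    }
    return data.append("]").toString();
  }
}

Formatting of the start/finish/elapsed columns is deferred to the client-side renderers wired up in attemptsTableInit() below, which is why the patch drops Times and StringUtils from this class.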
@@ -140,9 +130,24 @@ public class TaskPage extends AppView {
     }
 
     private String attemptsTableInit() {
-      return tableInit().
-          // Sort by id upon page load
-          append(", aaSorting: [[0, 'asc']]").
-          append("}").toString();
+      return tableInit()
+        .append(", 'aaData': attemptsTableData")
+        .append(", bDeferRender: true")
+        .append(", bProcessing: true")
+        .append("\n,aoColumnDefs:[\n")
+
+        //logs column should not filterable (it includes container ID which may pollute searches)
+        .append("\n{'aTargets': [ 4 ]")
+        .append(", 'bSearchable': false }")
+
+        .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
+        .append(" ], 'mRender': renderHadoopDate }")
+
+        .append("\n, {'sType':'numeric', 'aTargets': [ 7")
+        .append(" ], 'mRender': renderHadoopElapsedTime }]")
+
+        // Sort by id upon page load
+        .append("\n, aaSorting: [[0, 'asc']]")
+        .append("}").toString();
     }
   }
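attemptsTableInit() only assembles a string of DataTables options, but the design point is that sorting stays numeric while display formatting moves to client-side renderers. A compact sketch of that options fragment with the reasoning as comments; renderHadoopDate and renderHadoopElapsedTime are the JS renderer names referenced by the patch, and the keys contributed by tableInit() itself are omitted here.

public class AttemptsTableInitSketch {
  /** Fragment of the DataTables options object, built as a plain string. */
  static String optionsFragment() {
    return new StringBuilder()
        // Rows come from the 'var attemptsTableData=...' script emitted above.
        .append(", 'aaData': attemptsTableData")
        .append(", bDeferRender: true")     // build DOM rows only when shown
        .append(", bProcessing: true")
        .append("\n,aoColumnDefs:[\n")
        // Logs column: links contain container IDs, so keep it out of search.
        .append("{'aTargets': [ 4 ], 'bSearchable': false }")
        // Started/Finished hold raw millis: sort numerically, format on render.
        .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6 ], 'mRender': renderHadoopDate }")
        // Elapsed likewise, rendered as a duration.
        .append("\n, {'sType':'numeric', 'aTargets': [ 7 ], 'mRender': renderHadoopElapsedTime }]")
        .append("\n, aaSorting: [[0, 'asc']]")
        .toString();
  }
}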
HsTaskPage.java
@@ -29,6 +29,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
 import java.util.Collection;
 
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
@@ -110,13 +111,17 @@ public class HsTaskPage extends HsView {
             th(".note", "Note");
 
       TBODY<TABLE<Hamlet>> tbody = headRow._()._().tbody();
-      for (TaskAttempt ta : getTaskAttempts()) {
+      // Write all the data into a JavaScript array of arrays for JQuery
+      // DataTables to display
+      StringBuilder attemptsTableData = new StringBuilder("[\n");
+
+      for (TaskAttempt ta : getTaskAttempts()) {
        String taid = MRApps.toString(ta.getID());
 
        String nodeHttpAddr = ta.getNodeHttpAddress();
        String containerIdString = ta.getAssignedContainerID().toString();
        String nodeIdString = ta.getAssignedContainerMgrAddress();
-       String nodeRackName = ta.getNodeRackName();
+       String nodeRackName = ta.getNodeRackName();
 
        long attemptStartTime = ta.getLaunchTime();
        long shuffleFinishTime = -1;
@@ -138,58 +143,43 @@ public class HsTaskPage extends HsView {
        long attemptElapsed =
            Times.elapsed(attemptStartTime, attemptFinishTime, false);
        int sortId = ta.getID().getId() + (ta.getID().getTaskId().getId() * 10000);
 
-       TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
-       TD<TR<TBODY<TABLE<Hamlet>>>> td = row.td();
-
-       td.br().$title(String.valueOf(sortId))._(). // sorting
-         _(taid)._().td(ta.getState().toString()).td().a(".nodelink",
-             HttpConfig.getSchemePrefix()+ nodeHttpAddr,
-             nodeRackName + "/" + nodeHttpAddr);
-       td._();
-       row.td().
-         a(".logslink",
-             url("logs", nodeIdString, containerIdString, taid, app.getJob()
-                 .getUserName()), "logs")._();
-
-       row.td().
-         br().$title(String.valueOf(attemptStartTime))._().
-         _(Times.format(attemptStartTime))._();
+       attemptsTableData.append("[\"")
+       .append(sortId + " ").append(taid).append("\",\"")
+       .append(ta.getState().toString()).append("\",\"")
+
+       .append("<a class='nodelink' href='" + HttpConfig.getSchemePrefix() + nodeHttpAddr + "'>")
+       .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
+
+       .append("<a class='logslink' href='").append(url("logs", nodeIdString
+         , containerIdString, taid, app.getJob().getUserName()))
+       .append("'>logs</a>\",\"")
+
+       .append(attemptStartTime).append("\",\"");
 
        if(type == TaskType.REDUCE) {
-         row.td().
-           br().$title(String.valueOf(shuffleFinishTime))._().
-           _(Times.format(shuffleFinishTime))._();
-         row.td().
-           br().$title(String.valueOf(sortFinishTime))._().
-           _(Times.format(sortFinishTime))._();
+         attemptsTableData.append(shuffleFinishTime).append("\",\"")
+         .append(sortFinishTime).append("\",\"");
        }
-       row.
-         td().
-           br().$title(String.valueOf(attemptFinishTime))._().
-           _(Times.format(attemptFinishTime))._();
-
+       attemptsTableData.append(attemptFinishTime).append("\",\"");
+
        if(type == TaskType.REDUCE) {
-         row.td().
-           br().$title(String.valueOf(elapsedShuffleTime))._().
-           _(formatTime(elapsedShuffleTime))._();
-         row.td().
-           br().$title(String.valueOf(elapsedSortTime))._().
-           _(formatTime(elapsedSortTime))._();
-         row.td().
-           br().$title(String.valueOf(elapsedReduceTime))._().
-           _(formatTime(elapsedReduceTime))._();
+         attemptsTableData.append(elapsedShuffleTime).append("\",\"")
+         .append(elapsedSortTime).append("\",\"")
+         .append(elapsedReduceTime).append("\",\"");
        }
 
-       row.
-         td().
-           br().$title(String.valueOf(attemptElapsed))._().
-           _(formatTime(attemptElapsed))._().
-         td(".note", Joiner.on('\n').join(ta.getDiagnostics()));
-       row._();
+       attemptsTableData.append(attemptElapsed).append("\",\"")
+       .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+         Joiner.on('\n').join(ta.getDiagnostics())))).append("\"],\n");
      }
 
+     //Remove the last comma and close off the array of arrays
+     if(attemptsTableData.charAt(attemptsTableData.length() - 2) == ',') {
+       attemptsTableData.delete(attemptsTableData.length()-2, attemptsTableData.length()-1);
+     }
+     attemptsTableData.append("]");
+     html.script().$type("text/javascript").
+       _("var attemptsTableData=" + attemptsTableData)._();
+
      TR<TFOOT<TABLE<Hamlet>>> footRow = tbody._().tfoot().tr();
      footRow.
        th().input("search_init").$type(InputType.text).
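The history-server rows differ from the AM version in two ways: reduce attempts carry extra timing columns, and column 0 embeds a numeric sort key ahead of the attempt id string. A sketch of just that row-shaping logic, with illustrative names; the node and logs link columns and the escaping are elided here (see the escapeJavaScript/escapeHtml calls above).

public class HsAttemptRowSketch {
  /** Appends one attempt as a JS array row; raw millis are formatted client-side. */
  static void appendRow(StringBuilder data, boolean isReduce, int sortId,
      String attemptId, String state, long startTime, long shuffleFinish,
      long sortFinish, long finishTime, long elapsedShuffle, long elapsedSort,
      long elapsedReduce, long elapsed, String diagnostics) {
    data.append("[\"")
        // "sortId attempt_..." lets a numeric renderer sort on the leading key.
        .append(sortId + " ").append(attemptId).append("\",\"")
        .append(state).append("\",\"")
        .append(startTime).append("\",\"");
    if (isReduce) {                       // reduce-only: shuffle and sort finish times
      data.append(shuffleFinish).append("\",\"")
          .append(sortFinish).append("\",\"");
    }
    data.append(finishTime).append("\",\"");
    if (isReduce) {                       // reduce-only: per-phase elapsed times
      data.append(elapsedShuffle).append("\",\"")
          .append(elapsedSort).append("\",\"")
          .append(elapsedReduce).append("\",\"");
    }
    data.append(elapsed).append("\",\"")
        .append(diagnostics == null ? "" : diagnostics)
        .append("\"],\n");
  }
}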
@@ -237,10 +227,6 @@ public class HsTaskPage extends HsView {
       footRow._()._()._();
     }
 
-    private String formatTime(long elapsed) {
-      return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
-    }
-
     /**
      * @return true if this is a valid request else false.
      */
@@ -292,24 +278,34 @@ public class HsTaskPage extends HsView {
       TaskId taskID = MRApps.toTaskID($(TASK_ID));
       type = taskID.getTaskType();
     }
-    StringBuilder b = tableInit().
-      append(",aoColumnDefs:[");
+    StringBuilder b = tableInit()
+      .append(", 'aaData': attemptsTableData")
+      .append(", bDeferRender: true")
+      .append(", bProcessing: true")
+      .append("\n,aoColumnDefs:[\n")
 
-    b.append("{'sType':'title-numeric', 'aTargets': [ 0");
-    if(type == TaskType.REDUCE) {
-      b.append(", 7, 8, 9, 10");
-    } else { //MAP
-      b.append(", 5");
-    }
-    b.append(" ] }]");
+      //logs column should not filterable (it includes container ID which may pollute searches)
+      .append("\n{'aTargets': [ 3 ]")
+      .append(", 'bSearchable': false }")
 
-    // Sort by id upon page load
-    b.append(", aaSorting: [[0, 'asc']]");
+      .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
+      .append(", 'mRender': parseHadoopAttemptID }")
 
-    b.append("}");
-    return b.toString();
+      .append("\n, {'sType':'numeric', 'aTargets': [ 4, 5")
+      //Column numbers are different for maps and reduces
+      .append(type == TaskType.REDUCE ? ", 6, 7" : "")
+      .append(" ], 'mRender': renderHadoopDate }")
+
+      .append("\n, {'sType':'numeric', 'aTargets': [")
+      .append(type == TaskType.REDUCE ? "8, 9, 10, 11" : "6")
+      .append(" ], 'mRender': renderHadoopElapsedTime }]")
+
+      // Sort by id upon page load
+      .append("\n, aaSorting: [[0, 'asc']]")
+      .append("}");
+    return b.toString();
   }
 
 
   private String attemptsPostTableInit() {
     return "var asInitVals = new Array();\n" +
       "$('tfoot input').keyup( function () \n{"+
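Because of the optional reduce-only columns, the aoColumnDefs targets above shift with the task type. The mapping implied by the strings built in attemptsTableInit(), pulled out into a small sketch; the helper names are illustrative.

public class HsAttemptsColumnTargets {
  /** Columns holding raw millis to be formatted by renderHadoopDate. */
  static String dateTargets(boolean isReduce) {
    return isReduce ? "4, 5, 6, 7" : "4, 5";
  }
  /** Columns holding raw millis to be formatted by renderHadoopElapsedTime. */
  static String elapsedTargets(boolean isReduce) {
    return isReduce ? "8, 9, 10, 11" : "6";
  }
}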
HsTasksBlock.java
@@ -140,6 +140,7 @@ public class HsTasksBlock extends HtmlBlock {
         attemptFinishTime = ta.getFinishTime();
         attemptElapsed = ta.getElapsedTime();
       }
+
       tasksTableData.append("[\"")
         .append("<a href='" + url("task", tid)).append("'>")
         .append(tid).append("</a>\",\"")
@@ -205,9 +206,4 @@ public class HsTasksBlock extends HtmlBlock {
 
     footRow._()._()._();
   }
-
-  private String formatTime(long elapsed) {
-    return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
-  }
-
 }
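HsTasksBlock already emits its rows as tasksTableData, with the task id cell carrying its link inside the string; the remaining change here is dropping the private formatTime helper, which is no longer called once elapsed times are formatted client-side. A tiny sketch of that link-in-cell idea; the taskUrl parameter below stands in for the webapp's url() helper and is illustrative.

public class TaskLinkCellSketch {
  /** A DataTables cell is just a string, so the anchor is baked into it. */
  static String taskCell(String taskUrl, String tid) {
    return "<a href='" + taskUrl + "'>" + tid + "</a>";
  }
}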
HsTasksPage.java
@@ -67,33 +67,25 @@ public class HsTasksPage extends HsView {
       type = MRApps.taskType(symbol);
     }
     StringBuilder b = tableInit().
-      append(", 'aaData': tasksTableData");
-    b.append(", bDeferRender: true");
-    b.append(", bProcessing: true");
+      append(", 'aaData': tasksTableData")
+      .append(", bDeferRender: true")
+      .append(", bProcessing: true")
 
-    b.append("\n, aoColumnDefs: [\n");
-    b.append("{'sType':'numeric', 'aTargets': [ 0 ]");
-    b.append(", 'mRender': parseHadoopID }");
+      .append("\n, aoColumnDefs: [\n")
+      .append("{'sType':'numeric', 'aTargets': [ 0 ]")
+      .append(", 'mRender': parseHadoopID }")
 
-    b.append(", {'sType':'numeric', 'aTargets': [ 4");
-    if(type == TaskType.REDUCE) {
-      b.append(", 9, 10, 11, 12");
-    } else { //MAP
-      b.append(", 7");
-    }
-    b.append(" ], 'mRender': renderHadoopElapsedTime }");
+      .append(", {'sType':'numeric', 'aTargets': [ 4")
+      .append(type == TaskType.REDUCE ? ", 9, 10, 11, 12" : ", 7")
+      .append(" ], 'mRender': renderHadoopElapsedTime }")
 
-    b.append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5");
-    if(type == TaskType.REDUCE) {
-      b.append(", 6, 7, 8");
-    } else { //MAP
-      b.append(", 6");
-    }
-    b.append(" ], 'mRender': renderHadoopDate }]");
+      .append("\n, {'sType':'numeric', 'aTargets': [ 2, 3, 5")
+      .append(type == TaskType.REDUCE ? ", 6, 7, 8" : ", 6")
+      .append(" ], 'mRender': renderHadoopDate }]")
 
     // Sort by id upon page load
-    b.append("\n, aaSorting: [[0, 'asc']]");
-    b.append("}");
+      .append("\n, aaSorting: [[0, 'asc']]")
+      .append("}");
     return b.toString();
   }
 
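Taken together, each of these pages now emits one script that defines the data variable and one DataTables options string whose 'aaData' points at it. An end-to-end sketch of that hand-off; the dataVar naming is illustrative and the option set is trimmed down.

public class DataTablesEmitSketch {
  /** The script body the server writes, e.g. "var tasksTableData=[...]". */
  static String dataScript(String dataVar, String jsArrayLiteral) {
    return "var " + dataVar + "=" + jsArrayLiteral;
  }
  /** A trimmed-down options object referencing that variable by name. */
  static String initOptions(String dataVar) {
    return "{ 'aaData': " + dataVar
        + ", bDeferRender: true, bProcessing: true"
        + ", aaSorting: [[0, 'asc']] }";
  }
  public static void main(String[] args) {
    System.out.println(dataScript("tasksTableData", "[\n[\"task_1\",\"SUCCEEDED\"]\n]"));
    System.out.println(initOptions("tasksTableData"));
  }
}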