MAPREDUCE-6316. Task Attempt List entries should link to the task overview. (Siqi Li via gera)

(cherry picked from commit 63d40d5286)
Author: Gera Shegalov
Date:   2015-06-20 11:38:21 -07:00
Parent: 5774b1ff02
Commit: 34e744ce1a
8 changed files with 133 additions and 17 deletions

CHANGES.txt

@@ -88,6 +88,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6395. Improve the commit failure messages in MRAppMaster recovery.
     (Brahma Reddy Battula via gera)
 
+    MAPREDUCE-6316. Task Attempt List entries should link to the task overview.
+    (Siqi Li via gera)
+
   OPTIMIZATIONS
 
   BUG FIXES

AttemptsPage.java

@@ -29,9 +29,11 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.yarn.webapp.SubView;
@@ -50,6 +52,12 @@ public class AttemptsPage extends TaskPage {
       return true;
     }
 
+    @Override
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return "<a href='" + url("task", taskId.toString()) +
+          "'>" + ta.getId() + "</a>";
+    }
+
     @Override
     protected Collection<TaskAttempt> getTaskAttempts() {
       List<TaskAttempt> fewTaskAttemps = new ArrayList<TaskAttempt>();
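For orientation, a minimal, self-contained sketch of the template-method hook this patch introduces (simplified names, not the actual Hadoop classes): the base page renders the plain attempt ID, and a subclass that can compute the task-overview URL overrides the hook to emit an anchor instead.

public class AttemptIdHookSketch {
  // Stand-in for TaskPage: renders the raw attempt ID by default.
  static class BasePage {
    protected String getAttemptId(String taskId, String attemptId) {
      return attemptId;
    }
    // Stand-in for the attemptsTableData builder: the hook decides
    // what lands in the first cell of the JSON row.
    final String renderCell(String taskId, String attemptId) {
      return "[\"" + getAttemptId(taskId, attemptId) + "\", ...]";
    }
  }

  // Stand-in for AttemptsPage/HsAttemptsPage: links back to the task.
  static class LinkingPage extends BasePage {
    @Override
    protected String getAttemptId(String taskId, String attemptId) {
      return "<a href='/task/" + taskId + "'>" + attemptId + "</a>";
    }
  }

  public static void main(String[] args) {
    String task = "task_0_0001_r_000000";
    String attempt = "attempt_0_0001_r_000000_0";
    System.out.println(new BasePage().renderCell(task, attempt));
    System.out.println(new LinkingPage().renderCell(task, attempt));
  }
}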

TaskPage.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
@@ -128,8 +129,9 @@ public class TaskPage extends AppView {
         String nodeHttpAddr = ta.getNode();
         String diag = ta.getNote() == null ? "" : ta.getNote();
 
+        TaskId taskId = attempt.getID().getTaskId();
         attemptsTableData.append("[\"")
-        .append(ta.getId()).append("\",\"")
+        .append(getAttemptId(taskId, ta)).append("\",\"")
         .append(progress).append("\",\"")
         .append(ta.getState().toString()).append("\",\"")
         .append(StringEscapeUtils.escapeJavaScript(
@@ -182,6 +184,10 @@ public class TaskPage extends AppView {
       }
     }
 
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return ta.getId();
+    }
+
     protected boolean isValidRequest() {
       return app.getTask() != null;
     }
@@ -215,6 +221,9 @@ public class TaskPage extends AppView {
       .append("\n{'aTargets': [ 5 ]")
       .append(", 'bSearchable': false }")
+      .append("\n, {'sType':'string', 'aTargets': [ 0 ]")
+      .append(", 'mRender': parseHadoopID }")
       .append("\n, {'sType':'numeric', 'aTargets': [ 6, 7")
       .append(" ], 'mRender': renderHadoopDate }")

TestBlocks.java (org.apache.hadoop.mapreduce.v2.app.webapp)

@@ -24,19 +24,24 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.webapp.view.BlockForTest;
@@ -137,8 +142,68 @@ public class TestBlocks {
     assertTrue(data.toString().contains("100011"));
     assertFalse(data.toString().contains("Dummy Status \n*"));
     assertTrue(data.toString().contains("Dummy Status \\n*"));
+  }
+
+  /**
+   * test AttemptsBlock's rendering.
+   */
+  @Test
+  public void testAttemptsBlock() {
+    AppContext ctx = mock(AppContext.class);
+    AppForTest app = new AppForTest(ctx);
+
+    JobId jobId = new JobIdPBImpl();
+    jobId.setId(0);
+    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
+
+    TaskId taskId = new TaskIdPBImpl();
+    taskId.setId(0);
+    taskId.setTaskType(TaskType.REDUCE);
+    taskId.setJobId(jobId);
+    Task task = mock(Task.class);
+    when(task.getID()).thenReturn(taskId);
+    TaskReport report = mock(TaskReport.class);
+    when(task.getReport()).thenReturn(report);
+    when(task.getType()).thenReturn(TaskType.REDUCE);
+
+    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
+    Map<TaskAttemptId, TaskAttempt> attempts =
+        new HashMap<TaskAttemptId, TaskAttempt>();
+    TaskAttempt attempt = mock(TaskAttempt.class);
+    TaskAttemptId taId = new TaskAttemptIdPBImpl();
+    taId.setId(0);
+    taId.setTaskId(task.getID());
+    when(attempt.getID()).thenReturn(taId);
+    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
+    when(attempt.getState()).thenReturn(taState);
+    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
+    when(taReport.getTaskAttemptState()).thenReturn(taState);
+    when(attempt.getReport()).thenReturn(taReport);
+    attempts.put(taId, attempt);
+    tasks.put(taskId, task);
+    when(task.getAttempts()).thenReturn(attempts);
+
+    app.setTask(task);
+    Job job = mock(Job.class);
+    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
+    app.setJob(job);
+
+    AttemptsBlockForTest block = new AttemptsBlockForTest(app,
+        new Configuration());
+    block.addParameter(AMParams.TASK_TYPE, "r");
+    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
+
+    PrintWriter pWriter = new PrintWriter(data);
+    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
+
+    block.render(html);
+    pWriter.flush();
+    assertTrue(data.toString().contains(
+        "<a href='" + block.url("task", task.getID().toString()) + "'>"
+        + "attempt_0_0001_r_000000_0</a>"));
   }
 
   private class ConfBlockForTest extends ConfBlock {
@@ -168,4 +233,29 @@ public class TestBlocks {
     }
   }
 
+  private class AttemptsBlockForTest extends FewAttemptsBlock {
+    private final Map<String, String> params = new HashMap<String, String>();
+
+    public void addParameter(String name, String value) {
+      params.put(name, value);
+    }
+
+    public String $(String key, String defaultValue) {
+      String value = params.get(key);
+      return value == null ? defaultValue : value;
+    }
+
+    public AttemptsBlockForTest(App ctx, Configuration conf) {
+      super(ctx, conf);
+    }
+
+    @Override
+    public String url(String... parts) {
+      String result = "url://";
+      for (String string : parts) {
+        result += string + ":";
+      }
+      return result;
+    }
+  }
 }

HsAttemptsPage.java

@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.yarn.webapp.SubView;
@@ -59,6 +60,12 @@ public class HsAttemptsPage extends HsTaskPage {
       return app.getJob() != null;
     }
 
+    @Override
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return "<a href='" + url("task", taskId.toString()) +
+          "'>" + ta.getId() + "</a>";
+    }
+
     /*
      * (non-Javadoc)
      * @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsTaskPage.AttemptsBlock#getTaskAttempts()

HsTaskPage.java

@@ -143,11 +143,10 @@ public class HsTaskPage extends HsView {
         }
         long attemptElapsed =
             Times.elapsed(attemptStartTime, attemptFinishTime, false);
-        int sortId = attempt.getID().getId()
-            + (attempt.getID().getTaskId().getId() * 10000);
+        TaskId taskId = attempt.getID().getTaskId();
 
         attemptsTableData.append("[\"")
-        .append(sortId + " ").append(taid).append("\",\"")
+        .append(getAttemptId(taskId, ta)).append("\",\"")
         .append(ta.getState()).append("\",\"")
         .append(StringEscapeUtils.escapeJavaScript(
             StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
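A quick before/after of what the first cell of the history attempts table holds, sketched with made-up IDs and a hypothetical URL (neither appears in the patch): the old code prepended a numeric sort key that parseHadoopAttemptID later split off, while the new code stores the anchor itself and leaves extraction to parseHadoopID.

public class HsCellBeforeAfterSketch {
  public static void main(String[] args) {
    // Before: "<sortId> <attemptId>", sorted by the numeric prefix.
    String before = "10000 attempt_0_0001_r_000001_0";
    System.out.println(before.split(" ")[1]); // what used to be displayed

    // After: the attempt ID wrapped in a task-overview link.
    String after = "<a href='/jobhistory/task/task_0_0001_r_000001'>"
        + "attempt_0_0001_r_000001_0</a>";
    System.out.println(after); // displayed as-is; tag stripped for sort/filter
  }
}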
@@ -234,6 +233,10 @@ public class HsTaskPage extends HsView {
       footRow._()._()._();
     }
 
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return ta.getId();
+    }
+
     /**
      * @return true if this is a valid request else false.
      */
@@ -296,7 +299,7 @@ public class HsTaskPage extends HsView {
       .append(", 'bSearchable': false }")
       .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
-      .append(", 'mRender': parseHadoopAttemptID }")
+      .append(", 'mRender': parseHadoopID }")
       .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
       //Column numbers are different for maps and reduces

TestBlocks.java (org.apache.hadoop.mapreduce.v2.hs.webapp)

@@ -200,7 +200,7 @@ public class TestBlocks {
     block.render(html);
     pWriter.flush();
     // should be printed information about attempts
-    assertTrue(data.toString().contains("0 attempt_0_0001_r_000000_0"));
+    assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
     assertTrue(data.toString().contains("SUCCEEDED"));
     assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
     assertTrue(data.toString().contains("Processed 128\\/128 records &lt;p&gt; \\n"));

yarn.dt.plugins.js

@@ -146,17 +146,13 @@ function parseHadoopID(data, type, full) {
   if (type === 'display') {
     return data;
   }
 
-  //Return the visible string rather than the entire HTML tag
-  return data.split('>')[1].split('<')[0];
-}
-
-//JSON array element is "20000 attempt_1360183373897_0001_m_000002_0"
-function parseHadoopAttemptID(data, type, full) {
-  if (type === 'display' || type === 'filter') {
-    return data.split(' ')[1];
-  }
-  //For sorting use the order as defined in the JSON element
-  return data.split(' ')[0];
+  var splits = data.split('>');
+  // Return original string if there is no HTML tag
+  if (splits.length === 1) return data;
+
+  //Return the visible string rather than the entire HTML tag
+  return splits[1].split('<')[0];
 }
 
 function parseHadoopProgress(data, type, full) {
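To make the renderer's contract concrete, a self-contained Java transcription of the reworked parseHadoopID above (the real implementation is the JavaScript; this is only a sketch): for 'display' DataTables receives the cell unchanged, and for sorting or filtering it receives the bare ID, whether or not the cell was wrapped in a tag.

public class ParseHadoopIdSketch {
  static String parseHadoopID(String data, String type) {
    if ("display".equals(type)) {
      return data;                    // show the anchor (or plain ID) as-is
    }
    String[] splits = data.split(">");
    if (splits.length == 1) {
      return data;                    // no HTML tag: already a plain ID
    }
    return splits[1].split("<")[0];   // visible text between '>' and '<'
  }

  public static void main(String[] args) {
    String cell = "<a href='/task/task_0_0001_r_000000'>"
        + "attempt_0_0001_r_000000_0</a>";
    System.out.println(parseHadoopID(cell, "display")); // the full anchor
    System.out.println(parseHadoopID(cell, "sort"));    // attempt_0_0001_r_000000_0
    System.out.println(parseHadoopID("attempt_0_0001_r_000000_0", "filter"));
  }
}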