From 34e744ce1a26431e06613f5c0cd61045d2921a10 Mon Sep 17 00:00:00 2001
From: Gera Shegalov
Date: Sat, 20 Jun 2015 11:38:21 -0700
Subject: [PATCH] MAPREDUCE-6316. Task Attempt List entries should link to the
 task overview. (Siqi Li via gera)

(cherry picked from commit 63d40d528654707b3f56619a4eb20e21cdeadc7e)
---
 hadoop-mapreduce-project/CHANGES.txt          |  3 +
 .../mapreduce/v2/app/webapp/AttemptsPage.java |  8 ++
 .../mapreduce/v2/app/webapp/TaskPage.java     | 11 ++-
 .../mapreduce/v2/app/webapp/TestBlocks.java   | 92 ++++++++++++++++++-
 .../v2/hs/webapp/HsAttemptsPage.java          |  7 ++
 .../mapreduce/v2/hs/webapp/HsTaskPage.java    | 11 ++-
 .../mapreduce/v2/hs/webapp/TestBlocks.java    |  2 +-
 .../webapps/static/yarn.dt.plugins.js         | 16 ++--
 8 files changed, 133 insertions(+), 17 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 4e24e544f1b..45231463c53 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -88,6 +88,9 @@ Release 2.8.0 - UNRELEASED
     MAPREDUCE-6395. Improve the commit failure messages in MRAppMaster
     recovery. (Brahma Reddy Battula via gera)

+    MAPREDUCE-6316. Task Attempt List entries should link to the task overview.
+    (Siqi Li via gera)
+
   OPTIMIZATIONS

   BUG FIXES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java
index 5dda01e38ac..3b45b168b7d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java
@@ -29,9 +29,11 @@ import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.yarn.webapp.SubView;
@@ -50,6 +52,12 @@ public class AttemptsPage extends TaskPage {
       return true;
     }

+    @Override
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return "<a href='" + url("task", taskId.toString())
+          + "'>" + ta.getId() + "</a>";
+    }
+
     @Override
     protected Collection<TaskAttempt> getTaskAttempts() {
      List<TaskAttempt> fewTaskAttemps = new ArrayList<TaskAttempt>();
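
The change above adds a protected getAttemptId(TaskId, TaskAttemptInfo) hook that the all-attempts page overrides so the first column carries a link back to the task overview, while the single-task page keeps emitting the bare id. A minimal standalone sketch of that shape, in plain Java with illustrative names rather than the Hadoop classes; the "/task/<id>" href is an assumption standing in for whatever the Hamlet url() helper produces:

public class AttemptIdHookSketch {
  static class TaskPageSketch {
    // Base page: the single-task view keeps rendering the bare attempt id.
    protected String getAttemptId(String taskId, String attemptId) {
      return attemptId;
    }
    String renderFirstCell(String taskId, String attemptId) {
      return getAttemptId(taskId, attemptId);
    }
  }

  static class AttemptsPageSketch extends TaskPageSketch {
    // All-attempts page: wrap the id in a link pointing at the task overview.
    @Override
    protected String getAttemptId(String taskId, String attemptId) {
      return "<a href='/task/" + taskId + "'>" + attemptId + "</a>";
    }
  }

  public static void main(String[] args) {
    String task = "task_0_0001_r_000000";
    String attempt = "attempt_0_0001_r_000000_0";
    System.out.println(new TaskPageSketch().renderFirstCell(task, attempt));
    System.out.println(new AttemptsPageSketch().renderFirstCell(task, attempt));
  }
}
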
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
index d9f17c8ade9..19b0d7cfdfe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
@@ -128,8 +129,9 @@ public class TaskPage extends AppView {
         String nodeHttpAddr = ta.getNode();
         String diag = ta.getNote() == null ? "" : ta.getNote();

+        TaskId taskId = attempt.getID().getTaskId();
         attemptsTableData.append("[\"")
-        .append(ta.getId()).append("\",\"")
+        .append(getAttemptId(taskId, ta)).append("\",\"")
         .append(progress).append("\",\"")
         .append(ta.getState().toString()).append("\",\"")
         .append(StringEscapeUtils.escapeJavaScript(
@@ -182,6 +184,10 @@ public class TaskPage extends AppView {
     }

+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return ta.getId();
+    }
+
     protected boolean isValidRequest() {
       return app.getTask() != null;
     }

@@ -215,6 +221,9 @@ public class TaskPage extends AppView {
       .append("\n{'aTargets': [ 5 ]")
       .append(", 'bSearchable': false }")

+      .append("\n, {'sType':'string', 'aTargets': [ 0 ]")
+      .append(", 'mRender': parseHadoopID }")
+
       .append("\n, {'sType':'numeric', 'aTargets': [ 6, 7")
       .append(" ], 'mRender': renderHadoopDate }")

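
TaskPage now routes the attempt-id cell through getAttemptId(...) when building attemptsTableData and binds parseHadoopID as the mRender for column 0, so the table can display the link while still sorting and filtering on the visible id. A standalone sketch of what one row fragment and the column-definition fragment look like after this change; the class name, the progress/state values, and the truncated row are illustrative only:

public class AttemptsTableDataSketch {
  public static void main(String[] args) {
    String attemptCell =
        "<a href='/task/task_0_0001_r_000000'>attempt_0_0001_r_000000_0</a>";

    // First fields of one row of attemptsTableData, mirroring the appends above
    // (remaining columns omitted for brevity).
    StringBuilder row = new StringBuilder();
    row.append("[\"").append(attemptCell).append("\",\"")
       .append("100.0").append("\",\"")
       .append("SUCCEEDED").append("\",\"");

    // Column-definition fragment that binds the renderer to column 0.
    StringBuilder columnDef = new StringBuilder();
    columnDef.append("\n, {'sType':'string', 'aTargets': [ 0 ]")
             .append(", 'mRender': parseHadoopID }");

    System.out.println(row);
    System.out.println(columnDef);
  }
}
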
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
index 13f91e03569..3876fe89dc5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestBlocks.java
@@ -24,19 +24,24 @@ import java.util.HashMap;
 import java.util.Map;

 import org.junit.Test;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl;
 import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.webapp.view.BlockForTest;
@@ -137,8 +142,68 @@ public class TestBlocks {
     assertTrue(data.toString().contains("100011"));
     assertFalse(data.toString().contains("Dummy Status \n*"));
     assertTrue(data.toString().contains("Dummy Status \\n*"));
+  }

+  /**
+   * test AttemptsBlock's rendering.
+   */
+  @Test
+  public void testAttemptsBlock() {
+    AppContext ctx = mock(AppContext.class);
+    AppForTest app = new AppForTest(ctx);
+    JobId jobId = new JobIdPBImpl();
+    jobId.setId(0);
+    jobId.setAppId(ApplicationIdPBImpl.newInstance(0,1));
+
+    TaskId taskId = new TaskIdPBImpl();
+    taskId.setId(0);
+    taskId.setTaskType(TaskType.REDUCE);
+    taskId.setJobId(jobId);
+    Task task = mock(Task.class);
+    when(task.getID()).thenReturn(taskId);
+    TaskReport report = mock(TaskReport.class);
+
+    when(task.getReport()).thenReturn(report);
+    when(task.getType()).thenReturn(TaskType.REDUCE);
+
+    Map<TaskId, Task> tasks =
+        new HashMap<TaskId, Task>();
+    Map<TaskAttemptId, TaskAttempt> attempts =
+        new HashMap<TaskAttemptId, TaskAttempt>();
+    TaskAttempt attempt = mock(TaskAttempt.class);
+    TaskAttemptId taId = new TaskAttemptIdPBImpl();
+    taId.setId(0);
+    taId.setTaskId(task.getID());
+    when(attempt.getID()).thenReturn(taId);
+
+    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
+    when(attempt.getState()).thenReturn(taState);
+    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
+    when(taReport.getTaskAttemptState()).thenReturn(taState);
+    when(attempt.getReport()).thenReturn(taReport);
+    attempts.put(taId, attempt);
+    tasks.put(taskId, task);
+    when(task.getAttempts()).thenReturn(attempts);
+
+    app.setTask(task);
+    Job job = mock(Job.class);
+    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
+    app.setJob(job);
+
+    AttemptsBlockForTest block = new AttemptsBlockForTest(app,
+        new Configuration());
+    block.addParameter(AMParams.TASK_TYPE, "r");
+    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
+
+    PrintWriter pWriter = new PrintWriter(data);
+    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
+
+    block.render(html);
+    pWriter.flush();
+    assertTrue(data.toString().contains(
+        "<a href='url://task:task_0_0001_r_000000:'>"
+        +"attempt_0_0001_r_000000_0"));
   }

   private class ConfBlockForTest extends ConfBlock {
@@ -168,4 +233,29 @@
     }
   }

+  private class AttemptsBlockForTest extends FewAttemptsBlock {
+    private final Map<String, String> params = new HashMap<String, String>();
+
+    public void addParameter(String name, String value) {
+      params.put(name, value);
+    }
+
+    public String $(String key, String defaultValue) {
+      String value = params.get(key);
+      return value == null ? defaultValue : value;
+    }
+
+    public AttemptsBlockForTest(App ctx, Configuration conf) {
+      super(ctx, conf);
+    }
+
+    @Override
+    public String url(String... parts) {
+      String result = "url://";
+      for (String string : parts) {
+        result += string + ":";
+      }
+      return result;
+    }
+  }
 }
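
The new test drives FewAttemptsBlock through a small subclass that backs the inherited $(key, defaultValue) parameter lookup with a plain map and pins url() to a deterministic "url://..." string, so the emitted href can be asserted without a running web app. A reduced standalone version of that pattern, with illustrative class names; the expected outputs in the comments assume the same url() stub as the test:

import java.util.HashMap;
import java.util.Map;

public class BlockTestPatternSketch {
  static class StubbedBlock {
    private final Map<String, String> params = new HashMap<String, String>();

    void addParameter(String name, String value) {
      params.put(name, value);
    }

    // Mirrors the $(key, defaultValue) lookup the real block inherits.
    String $(String key, String defaultValue) {
      String value = params.get(key);
      return value == null ? defaultValue : value;
    }

    // Deterministic URL builder, like AttemptsBlockForTest#url above.
    String url(String... parts) {
      String result = "url://";
      for (String part : parts) {
        result += part + ":";
      }
      return result;
    }
  }

  public static void main(String[] args) {
    StubbedBlock block = new StubbedBlock();
    block.addParameter("task.type", "r");
    System.out.println(block.$("task.type", ""));                  // r
    System.out.println(block.url("task", "task_0_0001_r_000000")); // url://task:task_0_0001_r_000000:
  }
}
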
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
index 1a6bab9e7df..9233fd3eef8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAttemptsPage.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.yarn.webapp.SubView;
@@ -59,6 +60,12 @@ public class HsAttemptsPage extends HsTaskPage {
       return app.getJob() != null;
     }

+    @Override
+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return "<a href='" + url("task", taskId.toString())
+          + "'>" + ta.getId() + "</a>";
+    }
+
     /*
      * (non-Javadoc)
      * @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsTaskPage.AttemptsBlock#getTaskAttempts()
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
index 5bd8684449a..ba75018e46b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
@@ -143,11 +143,10 @@ public class HsTaskPage extends HsView {
       }
       long attemptElapsed =
           Times.elapsed(attemptStartTime, attemptFinishTime, false);
-      int sortId = attempt.getID().getId()
-          + (attempt.getID().getTaskId().getId() * 10000);
+      TaskId taskId = attempt.getID().getTaskId();

       attemptsTableData.append("[\"")
-      .append(sortId + " ").append(taid).append("\",\"")
+      .append(getAttemptId(taskId, ta)).append("\",\"")
       .append(ta.getState()).append("\",\"")
       .append(StringEscapeUtils.escapeJavaScript(
         StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
@@ -234,6 +233,10 @@ public class HsTaskPage extends HsView {
       footRow._()._()._();
     }

+    protected String getAttemptId(TaskId taskId, TaskAttemptInfo ta) {
+      return ta.getId();
+    }
+
     /**
      * @return true if this is a valid request else false.
      */
@@ -296,7 +299,7 @@ public class HsTaskPage extends HsView {
       .append(", 'bSearchable': false }")

       .append("\n, {'sType':'numeric', 'aTargets': [ 0 ]")
-      .append(", 'mRender': parseHadoopAttemptID }")
+      .append(", 'mRender': parseHadoopID }")

       .append("\n, {'sType':'numeric', 'aTargets': [ 5, 6")
       //Column numbers are different for maps and reduces
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
index b82965a38e8..d1a704f5f0f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestBlocks.java
@@ -200,7 +200,7 @@ public class TestBlocks {
     block.render(html);
     pWriter.flush();
     // should be printed information about attempts
-    assertTrue(data.toString().contains("0 attempt_0_0001_r_000000_0"));
+    assertTrue(data.toString().contains("attempt_0_0001_r_000000_0"));
     assertTrue(data.toString().contains("SUCCEEDED"));
     assertFalse(data.toString().contains("Processed 128/128 records <p> \n"));
     assertTrue(data.toString().contains("Processed 128\\/128 records &lt;p&gt; \\n"));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/yarn.dt.plugins.js b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/yarn.dt.plugins.js
index c9416fd005e..c0032725c43 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/yarn.dt.plugins.js
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/webapps/static/yarn.dt.plugins.js
@@ -146,17 +146,13 @@ function parseHadoopID(data, type, full) {
   if (type === 'display') {
     return data;
   }
-  //Return the visible string rather than the entire HTML tag
-  return data.split('>')[1].split('<')[0];
-}

-//JSON array element is "20000 attempt_1360183373897_0001_m_000002_0"
-function parseHadoopAttemptID(data, type, full) {
-  if (type === 'display' || type === 'filter') {
-    return data.split(' ')[1];
-  }
-  //For sorting use the order as defined in the JSON element
-  return data.split(' ')[0];
+  var splits = data.split('>');
+  // Return original string if there is no HTML tag
+  if (splits.length === 1) return data;
+
+  //Return the visible string rather than the entire HTML tag
+  return splits[1].split('<')[0];
 }

 function parseHadoopProgress(data, type, full) {
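
Before this patch, yarn.dt.plugins.js carried two renderers: parseHadoopID, which assumed the cell always contained an HTML tag, and parseHadoopAttemptID, which expected a "sortId attemptId" pair and sorted on the numeric prefix. The reworked parseHadoopID handles both linked and plain cells and derives the sort/filter value from the visible id. A standalone Java transcription of the before/after key derivation, with illustrative input values only:

public class SortKeySketch {
  // Old scheme (parseHadoopAttemptID): the cell was "<numeric sortId> <attempt id>"
  // and sorting used the numeric prefix.
  static String oldSortKey(String cell) {
    return cell.split(" ")[0];
  }

  // New scheme (parseHadoopID): the cell is either plain text or anchor markup;
  // sort and filter on the visible id, falling back to the raw value when no tag is present.
  static String newSortKey(String cell) {
    String[] splits = cell.split(">");
    return splits.length == 1 ? cell : splits[1].split("<")[0];
  }

  public static void main(String[] args) {
    System.out.println(oldSortKey("20000 attempt_1360183373897_0001_m_000002_0"));
    System.out.println(newSortKey(
        "<a href='/task/task_0_0001_r_000000'>attempt_0_0001_r_000000_0</a>"));
    System.out.println(newSortKey("attempt_0_0001_r_000000_0"));
  }
}
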