From 7f5a157995641b03822f07c4b0b203320f80b4cc Mon Sep 17 00:00:00 2001
From: Jason Darrell Lowe
Date: Thu, 13 Mar 2014 15:42:51 +0000
Subject: [PATCH] svn merge -c 1577202 FIXES: MAPREDUCE-5789. Average Reduce time is incorrect on Job Overview page. Contributed by Rushabh S Shah

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1577207 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |  3 +
 .../mapreduce/v2/hs/webapp/dao/JobInfo.java   |  2 +-
 .../v2/hs/webapp/dao/TestJobInfo.java         | 70 +++++++++++++++++++
 3 files changed, 74 insertions(+), 1 deletion(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index f6b1f875d0b..b4a18888739 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -81,6 +81,9 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5778. JobSummary does not escape newlines in the job name
     (Akira AJISAKA via jlowe)
 
+    MAPREDUCE-5789. Average Reduce time is incorrect on Job Overview page
+    (Rushabh S Shah via jlowe)
+
 Release 2.3.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
index 3fc41e84fa6..3fbb8d26285 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
@@ -295,7 +295,7 @@ public class JobInfo {
           avgMergeTime += attempt.getSortFinishTime()
               - attempt.getShuffleFinishTime();
           avgReduceTime += (attempt.getFinishTime() - attempt
-              .getShuffleFinishTime());
+              .getSortFinishTime());
         }
         break;
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/TestJobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/TestJobInfo.java
index c59672fa719..4cf623e0050 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/TestJobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/TestJobInfo.java
@@ -22,6 +22,7 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
+import java.util.HashMap;
 
 import junit.framework.Assert;
 
@@ -29,12 +30,22 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobACLsManager;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.hs.CompletedJob;
 import org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEntities;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
 import org.junit.Test;
 
+
 public class TestJobInfo {
 
   @Test(timeout = 10000)
@@ -66,4 +77,63 @@ public class TestJobInfo {
     // merge time should be 50.
     Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
   }
+
+  @Test
+  public void testAverageReduceTime() {
+
+    Job job = mock(CompletedJob.class);
+    final Task task1 = mock(Task.class);
+    final Task task2 = mock(Task.class);
+
+    JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
+
+    final TaskId taskId1 = MRBuilderUtils.newTaskId(jobId, 1, TaskType.REDUCE);
+    final TaskId taskId2 = MRBuilderUtils.newTaskId(jobId, 2, TaskType.REDUCE);
+
+    final TaskAttemptId taskAttemptId1 = MRBuilderUtils.
+        newTaskAttemptId(taskId1, 1);
+    final TaskAttemptId taskAttemptId2 = MRBuilderUtils.
+        newTaskAttemptId(taskId2, 2);
+
+    final TaskAttempt taskAttempt1 = mock(TaskAttempt.class);
+    final TaskAttempt taskAttempt2 = mock(TaskAttempt.class);
+
+    JobReport jobReport = mock(JobReport.class);
+
+    when(taskAttempt1.getState()).thenReturn(TaskAttemptState.SUCCEEDED);
+    when(taskAttempt1.getLaunchTime()).thenReturn(0L);
+    when(taskAttempt1.getShuffleFinishTime()).thenReturn(4L);
+    when(taskAttempt1.getSortFinishTime()).thenReturn(6L);
+    when(taskAttempt1.getFinishTime()).thenReturn(8L);
+
+    when(taskAttempt2.getState()).thenReturn(TaskAttemptState.SUCCEEDED);
+    when(taskAttempt2.getLaunchTime()).thenReturn(5L);
+    when(taskAttempt2.getShuffleFinishTime()).thenReturn(10L);
+    when(taskAttempt2.getSortFinishTime()).thenReturn(22L);
+    when(taskAttempt2.getFinishTime()).thenReturn(42L);
+
+
+    when(task1.getType()).thenReturn(TaskType.REDUCE);
+    when(task2.getType()).thenReturn(TaskType.REDUCE);
+    when(task1.getAttempts()).thenReturn
+        (new HashMap<TaskAttemptId, TaskAttempt>()
+        {{put(taskAttemptId1,taskAttempt1); }});
+    when(task2.getAttempts()).thenReturn
+        (new HashMap<TaskAttemptId, TaskAttempt>()
+        {{put(taskAttemptId2,taskAttempt2); }});
+
+    when(job.getTasks()).thenReturn
+        (new HashMap<TaskId, Task>()
+        {{ put(taskId1,task1); put(taskId2, task2); }});
+    when(job.getID()).thenReturn(jobId);
+
+    when(job.getReport()).thenReturn(jobReport);
+
+    when(job.getName()).thenReturn("TestJobInfo");
+    when(job.getState()).thenReturn(JobState.SUCCEEDED);
+
+    JobInfo jobInfo = new JobInfo(job);
+
+    Assert.assertEquals(11L, jobInfo.getAvgReduceTime().longValue());
+  }
 }
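
Note for reviewers (not part of the patch): before this change, JobInfo charged each successful reduce attempt from shuffle finish to attempt finish, which also counts the merge (sort) phase that is already reported as average merge time; the fix measures reduce time from sort finish instead. Below is a minimal standalone sketch of that arithmetic using the same attempt timings the new test mocks; the class and variable names are illustrative only and do not come from the Hadoop source.

// Illustrative sketch only -- mirrors the corrected average computation,
// using the timings mocked in testAverageReduceTime.
public class AvgReduceTimeSketch {
  public static void main(String[] args) {
    // {shuffleFinishTime, sortFinishTime, finishTime} per successful reduce attempt
    long[][] attempts = { {4L, 6L, 8L}, {10L, 22L, 42L} };

    long avgMergeTime = 0;
    long avgReduceTime = 0;
    for (long[] a : attempts) {
      avgMergeTime += a[1] - a[0];   // sort finish - shuffle finish
      avgReduceTime += a[2] - a[1];  // finish - sort finish (the fix; was finish - shuffle finish)
    }
    avgMergeTime /= attempts.length;   // (2 + 12) / 2 = 7
    avgReduceTime /= attempts.length;  // (2 + 20) / 2 = 11, matching the test's assertion

    System.out.println("avgMergeTime=" + avgMergeTime + " avgReduceTime=" + avgReduceTime);
  }
}

With the old formula the same timings would yield ((8 - 4) + (42 - 10)) / 2 = 18, which is why the Job Overview page overstated the average reduce time.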