svn merge -c 1577202 FIXES: MAPREDUCE-5789. Average Reduce time is incorrect on Job Overview page. Contributed by Rushabh S Shah
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1577207 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
50c410546b
commit
7f5a157995
|
@@ -81,6 +81,9 @@ Release 2.4.0 - UNRELEASED

    MAPREDUCE-5778. JobSummary does not escape newlines in the job name (Akira
    AJISAKA via jlowe)

    MAPREDUCE-5789. Average Reduce time is incorrect on Job Overview page
    (Rushabh S Shah via jlowe)

Release 2.3.1 - UNRELEASED

  INCOMPATIBLE CHANGES

|
@@ -295,7 +295,7 @@ public class JobInfo {
           avgMergeTime += attempt.getSortFinishTime()
               - attempt.getShuffleFinishTime();
           avgReduceTime += (attempt.getFinishTime() - attempt
-              .getShuffleFinishTime());
+              .getSortFinishTime());
         }
         break;
       }

|
||||
|
|
|
@@ -22,6 +22,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.HashMap;

import junit.framework.Assert;

@@ -29,12 +30,22 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
import org.apache.hadoop.mapreduce.v2.hs.CompletedJob;
import org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEntities;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.junit.Test;
public class TestJobInfo {
|
||||
|
||||
@Test(timeout = 10000)
|
||||
|
@ -66,4 +77,63 @@ public class TestJobInfo {
|
|||
// merge time should be 50.
|
||||
Assert.assertEquals(50L, jobInfo.getAvgMergeTime().longValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAverageReduceTime() {
|
||||
|
||||
Job job = mock(CompletedJob.class);
|
||||
final Task task1 = mock(Task.class);
|
||||
final Task task2 = mock(Task.class);
|
||||
|
||||
JobId jobId = MRBuilderUtils.newJobId(1L, 1, 1);
|
||||
|
||||
final TaskId taskId1 = MRBuilderUtils.newTaskId(jobId, 1, TaskType.REDUCE);
|
||||
final TaskId taskId2 = MRBuilderUtils.newTaskId(jobId, 2, TaskType.REDUCE);
|
||||
|
||||
final TaskAttemptId taskAttemptId1 = MRBuilderUtils.
|
||||
newTaskAttemptId(taskId1, 1);
|
||||
final TaskAttemptId taskAttemptId2 = MRBuilderUtils.
|
||||
newTaskAttemptId(taskId2, 2);
|
||||
|
||||
final TaskAttempt taskAttempt1 = mock(TaskAttempt.class);
|
||||
final TaskAttempt taskAttempt2 = mock(TaskAttempt.class);
|
||||
|
||||
JobReport jobReport = mock(JobReport.class);
|
||||
|
||||
when(taskAttempt1.getState()).thenReturn(TaskAttemptState.SUCCEEDED);
|
||||
when(taskAttempt1.getLaunchTime()).thenReturn(0L);
|
||||
when(taskAttempt1.getShuffleFinishTime()).thenReturn(4L);
|
||||
when(taskAttempt1.getSortFinishTime()).thenReturn(6L);
|
||||
when(taskAttempt1.getFinishTime()).thenReturn(8L);
|
||||
|
||||
when(taskAttempt2.getState()).thenReturn(TaskAttemptState.SUCCEEDED);
|
||||
when(taskAttempt2.getLaunchTime()).thenReturn(5L);
|
||||
when(taskAttempt2.getShuffleFinishTime()).thenReturn(10L);
|
||||
when(taskAttempt2.getSortFinishTime()).thenReturn(22L);
|
||||
when(taskAttempt2.getFinishTime()).thenReturn(42L);
|
||||
|
||||
|
||||
when(task1.getType()).thenReturn(TaskType.REDUCE);
|
||||
when(task2.getType()).thenReturn(TaskType.REDUCE);
|
||||
when(task1.getAttempts()).thenReturn
|
||||
(new HashMap<TaskAttemptId, TaskAttempt>()
|
||||
{{put(taskAttemptId1,taskAttempt1); }});
|
||||
when(task2.getAttempts()).thenReturn
|
||||
(new HashMap<TaskAttemptId, TaskAttempt>()
|
||||
{{put(taskAttemptId2,taskAttempt2); }});
|
||||
|
||||
when(job.getTasks()).thenReturn
|
||||
(new HashMap<TaskId, Task>()
|
||||
{{ put(taskId1,task1); put(taskId2, task2); }});
|
||||
when(job.getID()).thenReturn(jobId);
|
||||
|
||||
when(job.getReport()).thenReturn(jobReport);
|
||||
|
||||
when(job.getName()).thenReturn("TestJobInfo");
|
||||
when(job.getState()).thenReturn(JobState.SUCCEEDED);
|
||||
|
||||
JobInfo jobInfo = new JobInfo(job);
|
||||
|
||||
Assert.assertEquals(11L, jobInfo.getAvgReduceTime().longValue());
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue