From 67e975d6530e0fd7cde1471330e85d19aa3c10f3 Mon Sep 17 00:00:00 2001
From: Siddharth Seth
Date: Thu, 22 Dec 2011 22:36:08 +0000
Subject: [PATCH] Merge MAPREDUCE-3567 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1222499 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt                |  3 +++
 .../hadoop/mapred/LocalContainerLauncher.java       |  4 ++--
 .../apache/hadoop/mapred/MapTaskAttemptImpl.java    |  4 ++--
 .../hadoop/mapred/ReduceTaskAttemptImpl.java        |  5 ++---
 .../hadoop/mapreduce/v2/app/job/impl/JobImpl.java   |  8 ++++----
 .../mapreduce/v2/app/job/impl/MapTaskImpl.java      |  8 ++++----
 .../mapreduce/v2/app/job/impl/ReduceTaskImpl.java   |  7 ++++---
 .../v2/app/job/impl/TaskAttemptImpl.java            | 10 +++++-----
 .../mapreduce/v2/app/job/impl/TaskImpl.java         | 11 ++++++-----
 .../org/apache/hadoop/mapreduce/v2/app/MRApp.java   |  4 ++++
 .../hadoop/mapreduce/v2/app/MRAppBenchmark.java     |  3 +--
 .../mapreduce/v2/app/job/impl/TestTaskImpl.java     | 15 ++++++---------
 .../apache/hadoop/yarn/event/AsyncDispatcher.java   | 14 +++++++-------
 13 files changed, 50 insertions(+), 46 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 21001d01f0c..a7eda92f876 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -89,6 +89,9 @@ Release 0.23.1 - Unreleased
 
   OPTIMIZATIONS
 
+    MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar
+    Vavilapalli via sseth)
+
   BUG FIXES
 
     MAPREDUCE-2950. [Rumen] Fixed TestUserResolve. (Ravi Gummadi via amarrk)
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
index cb3e80b8b21..e1163a171df 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
@@ -29,9 +29,9 @@ import java.util.concurrent.LinkedBlockingQueue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSError;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnsupportedFileSystemException;
 import org.apache.hadoop.mapreduce.JobContext;
@@ -41,11 +41,11 @@ import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
index a948604085f..aa0894fdb2d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/MapTaskAttemptImpl.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 
 import java.util.Collection;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -35,13 +34,14 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.event.EventHandler;
 
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class MapTaskAttemptImpl extends TaskAttemptImpl {
 
   private final TaskSplitMetaInfo splitInfo;
 
   public MapTaskAttemptImpl(TaskId taskId, int attempt,
       EventHandler eventHandler, Path jobFile,
-      int partition, TaskSplitMetaInfo splitInfo, Configuration conf,
+      int partition, TaskSplitMetaInfo splitInfo, JobConf conf,
       TaskAttemptListener taskAttemptListener,
       OutputCommitter committer, Token<JobTokenIdentifier> jobToken,
       Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
index 1034a5633b2..7af06b9b1a9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ReduceTaskAttemptImpl.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 
 import java.util.Collection;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -34,14 +33,14 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.event.EventHandler;
 
-
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class ReduceTaskAttemptImpl extends TaskAttemptImpl {
 
   private final int numMapTasks;
 
   public ReduceTaskAttemptImpl(TaskId id, int attempt,
       EventHandler eventHandler, Path jobFile, int partition,
-      int numMapTasks, Configuration conf,
+      int numMapTasks, JobConf conf,
       TaskAttemptListener taskAttemptListener, OutputCommitter committer,
       Token<JobTokenIdentifier> jobToken,
       Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
index 9291075e6d2..cb9171cedc2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
@@ -54,7 +54,6 @@ import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent;
 import org.apache.hadoop.mapreduce.lib.chain.ChainMapper;
 import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
@@ -110,6 +109,7 @@ import org.apache.hadoop.yarn.state.StateMachineFactory;
 /** Implementation of Job interface. Maintains the state machines of Job.
  * The read and write calls use ReadWriteLock for concurrency.
  */
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
     EventHandler {
 
@@ -154,7 +154,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
   // Can then replace task-level uber counters (MR-2424) with job-level ones
   // sent from LocalContainerLauncher, and eventually including a count of
   // of uber-AM attempts (probably sent from MRAppMaster).
-  public Configuration conf;
+  public JobConf conf;
 
   //fields initialized in init
   private FileSystem fs;
@@ -371,7 +371,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
     this.applicationAttemptId = applicationAttemptId;
     this.jobId = jobId;
     this.jobName = conf.get(JobContext.JOB_NAME, "");
-    this.conf = conf;
+    this.conf = new JobConf(conf);
     this.metrics = metrics;
     this.clock = clock;
     this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
@@ -979,7 +979,7 @@ public class JobImpl implements org.apache.hadoop.mapreduce.v2.app.job.Job,
             job.oldJobId);
       } else {
         job.jobContext = new org.apache.hadoop.mapred.JobContextImpl(
-            new JobConf(job.conf), job.oldJobId);
+            job.conf, job.oldJobId);
       }
 
       long inputLength = 0;
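The JobImpl hunks above carry the substance of MAPREDUCE-3567: the job's Configuration is wrapped in a JobConf exactly once, in the constructor, and that single instance is then handed to JobContextImpl and, in the hunks that follow, down to every task and task attempt, instead of each call site allocating its own copy via new JobConf(conf). A minimal sketch of the allocation pattern only, with a made-up class name and simplified fields standing in for the real JobImpl members:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical illustration of the allocation pattern; not the real JobImpl.
class JobConfSharingSketch {

  // Before: each consumer wrapped the Configuration itself, so every task,
  // attempt and context object held its own JobConf copy in the AM heap.
  static JobConf perUseCopy(Configuration conf) {
    return new JobConf(conf);          // one more copy per call
  }

  // After: convert once, keep the JobConf, and hand the same reference out.
  private final JobConf jobConf;

  JobConfSharingSketch(Configuration conf) {
    this.jobConf = new JobConf(conf);  // single conversion, as in JobImpl's constructor
  }

  JobConf sharedConf() {
    return jobConf;                    // consumers reuse this one instance
  }
}
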
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java
index 119cc5190af..5bf3d94c877 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/MapTaskImpl.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.mapreduce.v2.app.job.impl;
 import java.util.Collection;
 import java.util.Set;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapTaskAttemptImpl;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -31,20 +31,20 @@ import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
 import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
+import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.event.EventHandler;
-
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class MapTaskImpl extends TaskImpl {
 
   private final TaskSplitMetaInfo taskSplitMetaInfo;
 
   public MapTaskImpl(JobId jobId, int partition, EventHandler eventHandler,
-      Path remoteJobConfFile, Configuration conf,
+      Path remoteJobConfFile, JobConf conf,
       TaskSplitMetaInfo taskSplitMetaInfo,
       TaskAttemptListener taskAttemptListener, OutputCommitter committer,
       Token<JobTokenIdentifier> jobToken,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java
index ae2e84a3778..a2f386aaab0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/ReduceTaskImpl.java
@@ -21,8 +21,8 @@ package org.apache.hadoop.mapreduce.v2.app.job.impl;
 import java.util.Collection;
 import java.util.Set;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.ReduceTaskAttemptImpl;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.OutputCommitter;
@@ -30,19 +30,20 @@ import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
 import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
+import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.event.EventHandler;
 
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class ReduceTaskImpl extends TaskImpl {
 
   private final int numMapTasks;
 
   public ReduceTaskImpl(JobId jobId, int partition,
-      EventHandler eventHandler, Path jobFile, Configuration conf,
+      EventHandler eventHandler, Path jobFile, JobConf conf,
       int numMapTasks, TaskAttemptListener taskAttemptListener,
       OutputCommitter committer, Token<JobTokenIdentifier> jobToken,
       Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
index 46479ee142c..52d3b47b62b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
@@ -125,6 +125,7 @@ import org.apache.hadoop.yarn.util.RackResolver;
 /**
  * Implementation of TaskAttempt interface.
  */
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public abstract class TaskAttemptImpl implements
     org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt, EventHandler {
 
@@ -135,10 +136,9 @@ public abstract class TaskAttemptImpl implements
   private static final int REDUCE_MEMORY_MB_DEFAULT = 1024;
   private final static RecordFactory recordFactory =
       RecordFactoryProvider.getRecordFactory(null);
-  protected final Configuration conf;
+  protected final JobConf conf;
   protected final Path jobFile;
   protected final int partition;
-  @SuppressWarnings("rawtypes")
   protected final EventHandler eventHandler;
   private final TaskAttemptId attemptId;
   private final Clock clock;
@@ -445,9 +445,9 @@ public abstract class TaskAttemptImpl implements
       .getProperty("line.separator");
 
   public TaskAttemptImpl(TaskId taskId, int i,
-      @SuppressWarnings("rawtypes") EventHandler eventHandler,
+      EventHandler eventHandler,
       TaskAttemptListener taskAttemptListener, Path jobFile, int partition,
-      Configuration conf, String[] dataLocalHosts, OutputCommitter committer,
+      JobConf conf, String[] dataLocalHosts, OutputCommitter committer,
      Token<JobTokenIdentifier> jobToken,
      Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock) {
     oldJobId = TypeConverter.fromYarn(taskId.getJobId());
@@ -1199,7 +1199,7 @@ public abstract class TaskAttemptImpl implements
         TaskAttemptEvent event) {
       @SuppressWarnings("deprecation")
       TaskAttemptContext taskContext =
-        new TaskAttemptContextImpl(new JobConf(taskAttempt.conf),
+        new TaskAttemptContextImpl(taskAttempt.conf,
           TypeConverter.fromYarn(taskAttempt.attemptId));
       taskAttempt.eventHandler.handle(new TaskCleanupEvent(
           taskAttempt.attemptId,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
index a7c64915124..b0708540834 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
@@ -31,8 +31,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
@@ -50,8 +50,6 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
-import org.apache.hadoop.mapreduce.v2.app.rm.ContainerFailedEvent;
 import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -66,6 +64,8 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskTAttemptEvent;
+import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
+import org.apache.hadoop.mapreduce.v2.app.rm.ContainerFailedEvent;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.Clock;
@@ -81,11 +81,12 @@ import org.apache.hadoop.yarn.state.StateMachineFactory;
 /**
  * Implementation of Task interface.
  */
+@SuppressWarnings({ "rawtypes", "unchecked", "deprecation" })
 public abstract class TaskImpl implements Task, EventHandler {
 
   private static final Log LOG = LogFactory.getLog(TaskImpl.class);
 
-  protected final Configuration conf;
+  protected final JobConf conf;
   protected final Path jobFile;
   protected final OutputCommitter committer;
   protected final int partition;
@@ -225,7 +226,7 @@ public abstract class TaskImpl implements Task, EventHandler {
   }
 
   public TaskImpl(JobId jobId, TaskType taskType, int partition,
-      EventHandler eventHandler, Path remoteJobConfFile, Configuration conf,
+      EventHandler eventHandler, Path remoteJobConfFile, JobConf conf,
       TaskAttemptListener taskAttemptListener, OutputCommitter committer,
      Token<JobTokenIdentifier> jobToken,
      Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
index 888bec3e508..561ecac8a91 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
@@ -119,6 +119,10 @@ public class MRApp extends MRAppMaster {
     this(maps, reduces, autoComplete, testName, cleanOnStart, 1);
   }
 
+  @Override
+  protected void downloadTokensAndSetupUGI(Configuration conf) {
+  }
+
   private static ApplicationAttemptId getApplicationAttemptId(
       ApplicationId applicationId, int startCount) {
     ApplicationAttemptId applicationAttemptId =
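The MRApp hunk above keeps unit tests independent of security setup by overriding a protected hook from MRAppMaster with an empty body. A minimal sketch of that pattern, using hypothetical class and method names (AppMasterBase, downloadTokens) rather than the real MRAppMaster API:

import org.apache.hadoop.conf.Configuration;

// Hypothetical stand-ins for MRAppMaster and its protected token-setup hook.
class AppMasterBase {
  protected void downloadTokens(Configuration conf) {
    // a real implementation would fetch credentials and set up the UGI here
  }
}

// Test subclass: the hook becomes a no-op so tests need no security material,
// mirroring the intent of MRApp#downloadTokensAndSetupUGI in the hunk above.
class TestAppMaster extends AppMasterBase {
  @Override
  protected void downloadTokens(Configuration conf) {
    // intentionally empty for tests
  }
}
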
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
index 0d6c7d7576c..279b81199ae 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssigned
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
@@ -169,7 +168,7 @@ public class MRAppBenchmark {
   }
 
   public void benchmark1() throws Exception {
-    int maps = 900;
+    int maps = 100000;
     int reduces = 100;
     System.out.println("Running benchmark with maps:"+maps +
         " reduces:"+reduces);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
index 5a0f89ffd9e..b55bae22546 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskImpl.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.v2.app.job.impl;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -29,7 +29,6 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Task;
@@ -60,11 +59,12 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+@SuppressWarnings({ "rawtypes", "deprecation" })
 public class TestTaskImpl {
 
   private static final Log LOG = LogFactory.getLog(TestTaskImpl.class);
 
-  private Configuration conf;
+  private JobConf conf;
   private TaskAttemptListener taskAttemptListener;
   private OutputCommitter committer;
   private Token jobToken;
@@ -91,9 +91,8 @@ public class TestTaskImpl {
 
     private int taskAttemptCounter = 0;
 
-    @SuppressWarnings("rawtypes")
     public MockTaskImpl(JobId jobId, int partition,
-        EventHandler eventHandler, Path remoteJobConfFile, Configuration conf,
+        EventHandler eventHandler, Path remoteJobConfFile, JobConf conf,
         TaskAttemptListener taskAttemptListener, OutputCommitter committer,
        Token<JobTokenIdentifier> jobToken,
        Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock,
@@ -132,10 +131,9 @@ public class TestTaskImpl {
     private TaskAttemptState state = TaskAttemptState.NEW;
     private TaskAttemptId attemptId;
 
-    @SuppressWarnings("rawtypes")
     public MockTaskAttemptImpl(TaskId taskId, int id, EventHandler eventHandler,
         TaskAttemptListener taskAttemptListener, Path jobFile, int partition,
-        Configuration conf, OutputCommitter committer,
+        JobConf conf, OutputCommitter committer,
        Token<JobTokenIdentifier> jobToken,
        Collection<Token<? extends TokenIdentifier>> fsTokens, Clock clock) {
       super(taskId, id, eventHandler, taskAttemptListener, jobFile, partition, conf,
@@ -175,7 +173,6 @@ public class TestTaskImpl {
   private class MockTask extends Task {
 
     @Override
-    @SuppressWarnings("deprecation")
     public void run(JobConf job, TaskUmbilicalProtocol umbilical)
         throws IOException, ClassNotFoundException, InterruptedException {
       return;
@@ -195,7 +192,7 @@ public class TestTaskImpl {
 
     ++startCount;
 
-    conf = new Configuration();
+    conf = new JobConf();
     taskAttemptListener = mock(TaskAttemptListener.class);
     committer = mock(OutputCommitter.class);
     jobToken = (Token) mock(Token.class);
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java
index 8a5fceecb09..ffa2e9cfc60 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java
@@ -75,7 +75,7 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
         try {
           event = eventQueue.take();
         } catch(InterruptedException ie) {
-          LOG.info("AsyncDispatcher thread interrupted", ie);
+          LOG.warn("AsyncDispatcher thread interrupted", ie);
           return;
         }
         if (event != null) {
@@ -114,8 +114,10 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
   @SuppressWarnings("unchecked")
   protected void dispatch(Event event) {
     //all events go thru this loop
-    LOG.debug("Dispatching the event " + event.getClass().getName() + "."
-        + event.toString());
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Dispatching the event " + event.getClass().getName() + "."
+          + event.toString());
+    }
 
     Class type = event.getType().getDeclaringClass();
 
@@ -131,12 +133,11 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
     }
   }
 
+  @SuppressWarnings("unchecked")
   @Override
-  @SuppressWarnings("rawtypes")
   public void register(Class eventType,
       EventHandler handler) {
     /* check to see if we have a listener registered */
-    @SuppressWarnings("unchecked")
     EventHandler registeredHandler = (EventHandler)
     eventDispatchers.get(eventType);
     LOG.info("Registering " + eventType + " for " + handler.getClass());
@@ -170,7 +171,7 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
       }
       int remCapacity = eventQueue.remainingCapacity();
       if (remCapacity < 1000) {
-        LOG.info("Very low remaining capacity in the event-queue: "
+        LOG.warn("Very low remaining capacity in the event-queue: "
             + remCapacity);
       }
       try {
@@ -186,7 +187,6 @@ public class AsyncDispatcher extends AbstractService implements Dispatcher {
    * are interested in the event.
    * @param the type of event these multiple handlers are interested in.
    */
-  @SuppressWarnings("rawtypes")
   static class MultiListenerHandler implements EventHandler {
 
     List<EventHandler<Event>> listofHandlers;
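Two small patterns recur in the AsyncDispatcher hunks: string-building debug statements are guarded with LOG.isDebugEnabled(), and conditions that deserve operator attention (an interrupted dispatcher thread, a nearly full event queue) are raised from info to warn. A minimal standalone sketch of both, using commons-logging as the patched class does; the class name and methods here are made up for illustration:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical example class; only the logging pattern mirrors the patch.
public class DispatchLoggingSketch {
  private static final Log LOG = LogFactory.getLog(DispatchLoggingSketch.class);

  void dispatch(Object event) {
    // Guard the debug call so the string concatenation is skipped
    // entirely when debug logging is disabled.
    if (LOG.isDebugEnabled()) {
      LOG.debug("Dispatching the event " + event.getClass().getName() + "."
          + event.toString());
    }
  }

  void checkCapacity(int remCapacity) {
    // Conditions worth operator attention are logged at warn, not info.
    if (remCapacity < 1000) {
      LOG.warn("Very low remaining capacity in the event-queue: " + remCapacity);
    }
  }
}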