diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index b89cf19ebe6..22ad230ead9 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -540,6 +540,9 @@ Release 2.1.0-beta - UNRELEASED
MAPREDUCE-5312. TestRMNMInfo is failing. (sandyr via tucu)
+ MAPREDUCE-5304. mapreduce.Job killTask/failTask/getTaskCompletionEvents
+ methods have incompatible signature changes. (kkambatl via tucu)
+
Release 2.0.5-alpha - 06/06/2013
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
index 96ab111a5c1..742da3f86a4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
@@ -62,8 +62,9 @@ public class TaskCompletionEvent
super(eventId, taskId, idWithinJob, isMap, org.apache.hadoop.mapreduce.
TaskCompletionEvent.Status.valueOf(status.name()), taskTrackerHttp);
}
-
- static TaskCompletionEvent downgrade(
+
+ @Private
+ public static TaskCompletionEvent downgrade(
org.apache.hadoop.mapreduce.TaskCompletionEvent event) {
return new TaskCompletionEvent(event.getEventId(),
TaskAttemptID.downgrade(event.getTaskAttemptId()),event.idWithinJob(),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
index 09eb8d5028e..78c6b4b1a9c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
@@ -675,37 +675,57 @@ public class Job extends JobContextImpl implements JobContext {
* Get events indicating completion (success/failure) of component tasks.
*
* @param startFrom index to start fetching events from
- * @return an array of {@link TaskCompletionEvent}s
+ * @return an array of {@link org.apache.hadoop.mapred.TaskCompletionEvent}s
* @throws IOException
*/
- public TaskCompletionEvent[] getTaskCompletionEvents(final int startFrom)
- throws IOException {
+ public org.apache.hadoop.mapred.TaskCompletionEvent[]
+ getTaskCompletionEvents(final int startFrom) throws IOException {
try {
- return getTaskCompletionEvents(startFrom, 10);
+ TaskCompletionEvent[] events = getTaskCompletionEvents(startFrom, 10);
+ org.apache.hadoop.mapred.TaskCompletionEvent[] retEvents =
+ new org.apache.hadoop.mapred.TaskCompletionEvent[events.length];
+ for (int i = 0; i < events.length; i++) {
+ retEvents[i] = org.apache.hadoop.mapred.TaskCompletionEvent.downgrade
+ (events[i]);
+ }
+ return retEvents;
} catch (InterruptedException ie) {
throw new IOException(ie);
}
}
+ /**
+ * Kill indicated task attempt.
+ * @param taskId the id of the task to kill.
+   * @param shouldFail if <code>true</code> the task is failed and added
+ * to failed tasks list, otherwise it is just killed,
+ * w/o affecting job failure status.
+ */
+ @Private
+ public boolean killTask(final TaskAttemptID taskId,
+ final boolean shouldFail) throws IOException {
+ ensureState(JobState.RUNNING);
+ try {
+      return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+ public Boolean run() throws IOException, InterruptedException {
+ return cluster.getClient().killTask(taskId, shouldFail);
+ }
+ });
+ }
+ catch (InterruptedException ie) {
+ throw new IOException(ie);
+ }
+ }
+
/**
* Kill indicated task attempt.
*
* @param taskId the id of the task to be terminated.
* @throws IOException
*/
- public boolean killTask(final TaskAttemptID taskId)
+ public void killTask(final TaskAttemptID taskId)
throws IOException {
- ensureState(JobState.RUNNING);
- try {
-      return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- public Boolean run() throws IOException, InterruptedException {
- return cluster.getClient().killTask(taskId, false);
- }
- });
- }
- catch (InterruptedException ie) {
- throw new IOException(ie);
- }
+ killTask(taskId, false);
}
/**
@@ -714,20 +734,9 @@ public class Job extends JobContextImpl implements JobContext {
* @param taskId the id of the task to be terminated.
* @throws IOException
*/
- public boolean failTask(final TaskAttemptID taskId)
+ public void failTask(final TaskAttemptID taskId)
throws IOException {
- ensureState(JobState.RUNNING);
- try {
-      return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
- @Override
- public Boolean run() throws IOException, InterruptedException {
- return cluster.getClient().killTask(taskId, true);
- }
- });
- }
- catch (InterruptedException ie) {
- throw new IOException(ie);
- }
+ killTask(taskId, true);
}
/**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 6de078d6467..37412e018d5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -322,7 +322,7 @@ public class CLI extends Configured implements Tool {
Job job = cluster.getJob(taskID.getJobID());
if (job == null) {
System.out.println("Could not find job " + jobid);
- } else if (job.killTask(taskID)) {
+ } else if (job.killTask(taskID, false)) {
System.out.println("Killed task " + taskid);
exitCode = 0;
} else {
@@ -334,7 +334,7 @@ public class CLI extends Configured implements Tool {
Job job = cluster.getJob(taskID.getJobID());
if (job == null) {
System.out.println("Could not find job " + jobid);
- } else if(job.failTask(taskID)) {
+ } else if(job.killTask(taskID, true)) {
System.out.println("Killed task " + taskID + " by failing it");
exitCode = 0;
} else {