MAPREDUCE-5234. Change mapred.TaskReport and mapreduce.TaskReport for binary compatibility with mapred in 1.x but incompatible with 0.23.x. Contributed by Mayank Bansal.

svn merge --ignore-ancestry -c 1483940 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1483941 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-05-17 18:17:57 +00:00
parent f0bd8da38b
commit d180262cf9
4 changed files with 47 additions and 7 deletions
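In effect, org.apache.hadoop.mapred.TaskReport again exposes getTaskId() returning the task ID as a String, as mapred code compiled against 1.x expects, while getTaskID() returns the typed org.apache.hadoop.mapred.TaskID; 0.23.x callers that used getTaskId() for the typed object must move to getTaskID(). The sketch below is not part of this commit: it only illustrates the 1.x-style client pattern the change keeps working, and the JobClient setup and job ID are placeholders.

// Illustrative sketch only (not from this commit): assumes a cluster reachable
// through the default configuration; the job ID is a placeholder.
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.TaskReport;

public class TaskReportCompatSketch {
  public static void main(String[] args) throws Exception {
    JobClient client = new JobClient(new JobConf());
    JobID jobId = JobID.forName("job_1014873536921_0006");  // placeholder id
    for (TaskReport report : client.getMapTaskReports(jobId)) {
      // 1.x-era callers are compiled against the String-returning accessor.
      String idAsString = report.getTaskId();
      // New code can use the typed accessor instead.
      org.apache.hadoop.mapred.TaskID idAsObject = report.getTaskID();
      System.out.println(idAsString + " " + idAsObject.getTaskType());
    }
  }
}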

CHANGES.txt

@@ -7,6 +7,10 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-4067. Changed MRClientProtocol api to throw IOException only (Xuan
     Gong via vinodkv)
 
+    MAPREDUCE-5234. Change mapred.TaskReport and mapreduce.TaskReport for binary
+    compatibility with mapred in 1.x but incompatible with 0.23.x. (Mayank Bansal
+    via vinodkv)
+
   NEW FEATURES
 
   IMPROVEMENTS

org/apache/hadoop/mapred/TaskReport.java

@@ -75,7 +75,7 @@ public class TaskReport extends org.apache.hadoop.mapreduce.TaskReport {
   static TaskReport downgrade(
       org.apache.hadoop.mapreduce.TaskReport report) {
-    return new TaskReport(TaskID.downgrade(report.getTaskId()),
+    return new TaskReport(TaskID.downgrade(report.getTaskID()),
       report.getProgress(), report.getState(), report.getDiagnostics(),
       report.getCurrentStatus(), report.getStartTime(), report.getFinishTime(),
       Counters.downgrade(report.getTaskCounters()));
@@ -90,9 +90,16 @@ public class TaskReport extends org.apache.hadoop.mapreduce.TaskReport {
     return ret.toArray(new TaskReport[0]);
   }
 
+  /** The string of the task id. */
+  public String getTaskId() {
+    return TaskID.downgrade(super.getTaskID()).toString();
+  }
+
   /** The id of the task. */
-  public TaskID getTaskID() { return TaskID.downgrade(super.getTaskId()); }
+  public TaskID getTaskID() {
+    return TaskID.downgrade(super.getTaskID());
+  }
 
   public Counters getCounters() {
     return Counters.downgrade(super.getTaskCounters());
   }

org/apache/hadoop/mapreduce/TaskReport.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.mapred.TIPStatus;
+import org.apache.hadoop.mapred.TaskID;
 import org.apache.hadoop.util.StringInterner;
 
 /** A report on the state of a task. */
@@ -76,9 +77,16 @@ public class TaskReport implements Writable {
     this.finishTime = finishTime;
     this.counters = counters;
   }
 
-  /** The id of the task. */
-  public TaskID getTaskId() { return taskid; }
+  /** The string of the task ID. */
+  public String getTaskId() {
+    return taskid.toString();
+  }
+
+  /** The ID of the task. */
+  public TaskID getTaskID() {
+    return taskid;
+  }
 
   /** The amount completed, between zero and one. */
   public float getProgress() { return progress; }
@@ -171,7 +179,7 @@
       && this.progress == report.getProgress()
       && this.startTime == report.getStartTime()
       && this.state.equals(report.getState())
-      && this.taskid.equals(report.getTaskId());
+      && this.taskid.equals(report.getTaskID());
     }
     return false;
   }

TestJobInfo.java

@@ -26,6 +26,11 @@ import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.JobStatus.State;
+import org.junit.Assert;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
@@ -53,4 +58,20 @@ public class TestJobInfo {
     assertEquals(info.getUser().toString(), copyinfo.getUser().toString());
   }
+
+  @Test(timeout = 5000)
+  public void testTaskID() throws IOException, InterruptedException {
+    JobID jobid = new JobID("1014873536921", 6);
+    TaskID tid = new TaskID(jobid, TaskType.MAP, 0);
+    org.apache.hadoop.mapred.TaskID tid1 =
+        org.apache.hadoop.mapred.TaskID.downgrade(tid);
+    org.apache.hadoop.mapred.TaskReport treport =
+        new org.apache.hadoop.mapred.TaskReport(tid1, 0.0f,
+          State.FAILED.toString(), null, TIPStatus.FAILED, 100, 100,
+          new org.apache.hadoop.mapred.Counters());
+    Assert
+        .assertEquals(treport.getTaskId(), "task_1014873536921_0006_m_000000");
+    Assert.assertEquals(treport.getTaskID().toString(),
+        "task_1014873536921_0006_m_000000");
+  }
 
 }