MAPREDUCE-5280. Bring back removed constructor and a method in mapreduce ClusterMetrics for binary compatibility with 1.x APIs. Contributed by Mayank Bansal.

svn merge --ignore-ancestry -c 1488458 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1488459 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli, 2013-06-01 03:59:23 +00:00
commit 4e0ac70cea (parent 089b3edd0e)
5 changed files with 70 additions and 19 deletions

CHANGES.txt

@@ -126,6 +126,10 @@ Release 2.0.5-beta - UNRELEASED
     mapreduce CombineFileRecordReader for binary compatibility with 1.x APIs.
     (Mayank Bansal via vinodkv)
 
+    MAPREDUCE-5280. Bring back removed constructor and a method in mapreduce
+    ClusterMetrics for binary compatibility with 1.x APIs. (Mayank Bansal via
+    vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method

ClusterStatus.java

@@ -188,6 +188,7 @@ public class ClusterStatus implements Writable {
   private JobTrackerStatus status;
   private Collection<BlackListInfo> blacklistedTrackersInfo =
     new ArrayList<BlackListInfo>();
+  private int grayListedTrackers;
 
   ClusterStatus() {}
 
@@ -223,9 +224,30 @@ public class ClusterStatus implements Writable {
    * @param status the {@link JobTrackerStatus} of the <code>JobTracker</code>
    * @param numDecommissionedNodes number of decommission trackers
    */
-  ClusterStatus(int trackers, int blacklists, long ttExpiryInterval,
-      int maps, int reduces, int maxMaps, int maxReduces,
-      JobTrackerStatus status, int numDecommissionedNodes) {
+  ClusterStatus(int trackers, int blacklists, long ttExpiryInterval, int maps,
+      int reduces, int maxMaps, int maxReduces, JobTrackerStatus status,
+      int numDecommissionedNodes) {
+    this(trackers, blacklists, ttExpiryInterval, maps, reduces, maxMaps,
+        maxReduces, status, numDecommissionedNodes, 0);
+  }
+
+  /**
+   * Construct a new cluster status.
+   *
+   * @param trackers no. of tasktrackers in the cluster
+   * @param blacklists no of blacklisted task trackers in the cluster
+   * @param ttExpiryInterval the tasktracker expiry interval
+   * @param maps no. of currently running map-tasks in the cluster
+   * @param reduces no. of currently running reduce-tasks in the cluster
+   * @param maxMaps the maximum no. of map tasks in the cluster
+   * @param maxReduces the maximum no. of reduce tasks in the cluster
+   * @param status the {@link JobTrackerStatus} of the <code>JobTracker</code>
+   * @param numDecommissionedNodes number of decommission trackers
+   * @param numGrayListedTrackers number of graylisted trackers
+   */
+  ClusterStatus(int trackers, int blacklists, long ttExpiryInterval, int maps,
+      int reduces, int maxMaps, int maxReduces, JobTrackerStatus status,
+      int numDecommissionedNodes, int numGrayListedTrackers) {
     numActiveTrackers = trackers;
     numBlacklistedTrackers = blacklists;
     this.numExcludedNodes = numDecommissionedNodes;
@@ -235,6 +257,7 @@ public class ClusterStatus implements Writable {
     max_map_tasks = maxMaps;
     max_reduce_tasks = maxReduces;
     this.status = status;
+    this.grayListedTrackers = numGrayListedTrackers;
   }
 
   /**
@@ -339,7 +362,7 @@ public class ClusterStatus implements Writable {
    */
   @Deprecated
   public int getGraylistedTrackers() {
-    return 0;
+    return grayListedTrackers;
   }
 
   /**
@@ -480,6 +503,7 @@ public class ClusterStatus implements Writable {
     out.writeInt(max_map_tasks);
     out.writeInt(max_reduce_tasks);
     WritableUtils.writeEnum(out, status);
+    out.writeInt(grayListedTrackers);
   }
 
   public void readFields(DataInput in) throws IOException {
@@ -507,5 +531,6 @@ public class ClusterStatus implements Writable {
     max_map_tasks = in.readInt();
     max_reduce_tasks = in.readInt();
     status = WritableUtils.readEnum(in, JobTrackerStatus.class);
+    grayListedTrackers = in.readInt();
   }
 }
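The ClusterStatus changes keep write() and readFields() symmetric: the new grayListedTrackers field is appended after the status enum on write and consumed at the same position on read, which is what the TestNetworkedJob read/write check at the end of this commit relies on. The helper below is not part of the patch; it is a minimal, generic sketch of that round-trip pattern (the class name WritableRoundTrip is made up for illustration), and it only copies ClusterStatus correctly because both methods now handle the field in the same order.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public final class WritableRoundTrip {

  private WritableRoundTrip() {
  }

  /**
   * Serializes src into an in-memory buffer and populates dst from it.
   * Works for any Writable whose write() and readFields() consume the
   * same fields in the same order.
   */
  public static <T extends Writable> T copy(Writable src, T dst)
      throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buffer);
    src.write(out);      // for ClusterStatus this now ends with grayListedTrackers
    out.flush();
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(buffer.toByteArray()));
    dst.readFields(in);  // must read the same fields back in the same order
    return dst;
  }
}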

JobClient.java

@@ -711,17 +711,16 @@ public class JobClient extends CLI {
       return clientUgi.doAs(new PrivilegedExceptionAction<ClusterStatus>() {
         public ClusterStatus run() throws IOException, InterruptedException {
           ClusterMetrics metrics = cluster.getClusterStatus();
-          return new ClusterStatus(metrics.getTaskTrackerCount(),
-              metrics.getBlackListedTaskTrackerCount(), cluster.getTaskTrackerExpiryInterval(),
-              metrics.getOccupiedMapSlots(),
-              metrics.getOccupiedReduceSlots(), metrics.getMapSlotCapacity(),
-              metrics.getReduceSlotCapacity(),
-              cluster.getJobTrackerStatus(),
-              metrics.getDecommissionedTaskTrackerCount());
+          return new ClusterStatus(metrics.getTaskTrackerCount(), metrics
+              .getBlackListedTaskTrackerCount(), cluster
+              .getTaskTrackerExpiryInterval(), metrics.getOccupiedMapSlots(),
+              metrics.getOccupiedReduceSlots(), metrics.getMapSlotCapacity(),
+              metrics.getReduceSlotCapacity(), cluster.getJobTrackerStatus(),
+              metrics.getDecommissionedTaskTrackerCount(), metrics
+              .getGrayListedTaskTrackerCount());
         }
       });
-    }
-    catch (InterruptedException ie) {
+    } catch (InterruptedException ie) {
       throw new IOException(ie);
     }
   }
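With this change, JobClient.getClusterStatus() copies ClusterMetrics.getGrayListedTaskTrackerCount() into the old-API ClusterStatus it returns, so the deprecated getGraylistedTrackers() accessor now reports whatever the cluster publishes rather than a hard-coded 0. Below is a hedged sketch of a 1.x-style caller; the class name and printed labels are illustrative only, and the JobConf is assumed to pick up its settings from the configuration files on the classpath.

import java.io.IOException;

import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class ClusterStatusProbe {

  public static void main(String[] args) throws IOException {
    // Reads cluster settings from the configuration files on the classpath.
    JobClient client = new JobClient(new JobConf());
    try {
      ClusterStatus status = client.getClusterStatus();
      System.out.println("active trackers     : " + status.getTaskTrackers());
      System.out.println("blacklisted trackers: " + status.getBlacklistedTrackers());
      // Returns the value wired through from ClusterMetrics by this patch.
      System.out.println("graylisted trackers : " + status.getGraylistedTrackers());
    } finally {
      client.close();
    }
  }
}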

ClusterMetrics.java

@@ -69,18 +69,28 @@ public class ClusterMetrics implements Writable {
   private int totalJobSubmissions;
   private int numTrackers;
   private int numBlacklistedTrackers;
+  private int numGraylistedTrackers;
   private int numDecommissionedTrackers;
 
   public ClusterMetrics() {
   }
 
   public ClusterMetrics(int runningMaps, int runningReduces,
-      int occupiedMapSlots, int occupiedReduceSlots,
-      int reservedMapSlots, int reservedReduceSlots,
-      int mapSlots, int reduceSlots,
-      int totalJobSubmissions,
-      int numTrackers, int numBlacklistedTrackers,
+      int occupiedMapSlots, int occupiedReduceSlots, int reservedMapSlots,
+      int reservedReduceSlots, int mapSlots, int reduceSlots,
+      int totalJobSubmissions, int numTrackers, int numBlacklistedTrackers,
       int numDecommissionedNodes) {
+    this(runningMaps, runningReduces, occupiedMapSlots, occupiedReduceSlots,
+        reservedMapSlots, reservedReduceSlots, mapSlots, reduceSlots,
+        totalJobSubmissions, numTrackers, numBlacklistedTrackers, 0,
+        numDecommissionedNodes);
+  }
+
+  public ClusterMetrics(int runningMaps, int runningReduces,
+      int occupiedMapSlots, int occupiedReduceSlots, int reservedMapSlots,
+      int reservedReduceSlots, int mapSlots, int reduceSlots,
+      int totalJobSubmissions, int numTrackers, int numBlacklistedTrackers,
+      int numGraylistedTrackers, int numDecommissionedNodes) {
     this.runningMaps = runningMaps;
     this.runningReduces = runningReduces;
     this.occupiedMapSlots = occupiedMapSlots;
@@ -92,6 +102,7 @@ public class ClusterMetrics implements Writable {
     this.totalJobSubmissions = totalJobSubmissions;
     this.numTrackers = numTrackers;
     this.numBlacklistedTrackers = numBlacklistedTrackers;
+    this.numGraylistedTrackers = numGraylistedTrackers;
     this.numDecommissionedTrackers = numDecommissionedNodes;
   }
 
@@ -194,6 +205,15 @@ public class ClusterMetrics implements Writable {
     return numBlacklistedTrackers;
   }
 
+  /**
+   * Get the number of graylisted trackers in the cluster.
+   *
+   * @return graylisted tracker count
+   */
+  public int getGrayListedTaskTrackerCount() {
+    return numGraylistedTrackers;
+  }
+
   /**
    * Get the number of decommissioned trackers in the cluster.
    *
@@ -216,6 +236,7 @@ public class ClusterMetrics implements Writable {
     totalJobSubmissions = in.readInt();
     numTrackers = in.readInt();
     numBlacklistedTrackers = in.readInt();
+    numGraylistedTrackers = in.readInt();
     numDecommissionedTrackers = in.readInt();
   }
 
@@ -232,6 +253,7 @@ public class ClusterMetrics implements Writable {
     out.writeInt(totalJobSubmissions);
     out.writeInt(numTrackers);
     out.writeInt(numBlacklistedTrackers);
+    out.writeInt(numGraylistedTrackers);
     out.writeInt(numDecommissionedTrackers);
   }
 }
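Per the commit message, these ClusterMetrics additions are the pieces brought back for 1.x binary compatibility: the constructor that accepts a graylisted-tracker count and the getGrayListedTaskTrackerCount() accessor, with the shorter 2.x constructor retained and delegating with a count of 0. A minimal sketch of code written against that restored surface follows; the class name and the literal values are arbitrary and for illustration only.

import org.apache.hadoop.mapreduce.ClusterMetrics;

public class ClusterMetricsCompatExample {

  public static void main(String[] args) {
    // Restored 13-argument constructor; the twelfth argument is the
    // graylisted-tracker count, the last one the decommissioned count.
    ClusterMetrics metrics = new ClusterMetrics(
        10, 4,    // running map / reduce tasks
        10, 4,    // occupied map / reduce slots
        0, 0,     // reserved map / reduce slots
        40, 16,   // total map / reduce slot capacity
        25,       // total job submissions
        8,        // task trackers
        1,        // blacklisted trackers
        1,        // graylisted trackers
        0);       // decommissioned trackers

    // Restored accessor.
    System.out.println("graylisted trackers: "
        + metrics.getGrayListedTaskTrackerCount());
  }
}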

TestNetworkedJob.java

@@ -206,6 +206,7 @@ public class TestNetworkedJob {
     assertEquals(status.getTaskTrackers(), 2);
     assertEquals(status.getTTExpiryInterval(), 0);
     assertEquals(status.getJobTrackerStatus(), JobTrackerStatus.RUNNING);
+    assertEquals(status.getGraylistedTrackers(), 0);
 
     // test read and write
     ByteArrayOutputStream dataOut = new ByteArrayOutputStream();