MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support binary compatibility with hadoop-1 examples. Contributed by Zhijie Shen.

svn merge --ignore-ancestry -c 1480394 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1480396 13f79535-47bb-0310-9956-ffa450edef68
Author: Vinod Kumar Vavilapalli
Date:   2013-05-08 18:29:10 +00:00
Parent: e422a22297
Commit: 2f7c04f53a
2 changed files with 33 additions and 5 deletions
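The headline change: createValueAggregatorJob() gains an overload that takes the calling class and forwards it to JobConf.setJarByClass(), so the submitted job ships the jar that actually contains the user's code rather than the MapReduce library jar. A minimal sketch of the intended call pattern (AggregateDriver is an illustrative name, not part of this patch):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

    public class AggregateDriver {
      public static void main(String[] args) throws IOException {
        // Passing the driver class as "caller" makes createValueAggregatorJob()
        // call setJarByClass(AggregateDriver.class), so the job is submitted
        // with the jar containing this example's classes.
        JobConf job =
            ValueAggregatorJob.createValueAggregatorJob(args, AggregateDriver.class);
        JobClient.runJob(job);
      }
    }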

hadoop-mapreduce-project/CHANGES.txt

@@ -59,6 +59,9 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5036. Default shuffle handler port should not be 8080.
     (Sandy Ryza via tomwhite)
 
+    MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support
+    binary compatibility with hadoop-1 examples. (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java

@@ -102,15 +102,17 @@ public class ValueAggregatorJob {
   /**
    * Create an Aggregate based map/reduce job.
    *
    * @param args the arguments used for job creation. Generic hadoop
    * arguments are accepted.
+   * @param caller the caller class.
    * @return a JobConf object ready for submission.
    *
    * @throws IOException
    * @see GenericOptionsParser
    */
-  public static JobConf createValueAggregatorJob(String args[])
+  @SuppressWarnings("rawtypes")
+  public static JobConf createValueAggregatorJob(String args[], Class<?> caller)
     throws IOException {
     Configuration conf = new Configuration();
@ -159,7 +161,7 @@ public class ValueAggregatorJob {
} }
String userJarFile = theJob.get("user.jar.file"); String userJarFile = theJob.get("user.jar.file");
if (userJarFile == null) { if (userJarFile == null) {
theJob.setJarByClass(ValueAggregator.class); theJob.setJarByClass(caller != null ? caller : ValueAggregatorJob.class);
} else { } else {
theJob.setJar(userJarFile); theJob.setJar(userJarFile);
} }
@@ -183,6 +185,21 @@ public class ValueAggregatorJob {
     return theJob;
   }
 
+  /**
+   * Create an Aggregate based map/reduce job.
+   *
+   * @param args the arguments used for job creation. Generic hadoop
+   * arguments are accepted.
+   * @return a JobConf object ready for submission.
+   *
+   * @throws IOException
+   * @see GenericOptionsParser
+   */
+  public static JobConf createValueAggregatorJob(String args[])
+    throws IOException {
+    return createValueAggregatorJob(args, ValueAggregator.class);
+  }
+
   public static JobConf createValueAggregatorJob(String args[]
     , Class<? extends ValueAggregatorDescriptor>[] descriptors)
     throws IOException {
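The single-argument overload above restores the exact hadoop-1 signature and merely delegates, so example drivers compiled against hadoop-1 keep linking and running unchanged. A sketch of such a legacy call site (LegacyDriver is a hypothetical name):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

    public class LegacyDriver {
      public static void main(String[] args) throws IOException {
        // Unchanged hadoop-1 era call; with this patch it resolves to the
        // delegating overload, which supplies ValueAggregator.class as the caller.
        JobConf job = ValueAggregatorJob.createValueAggregatorJob(args);
        JobClient.runJob(job);
      }
    }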
@@ -199,7 +216,15 @@ public class ValueAggregatorJob {
       job.set("aggregator.descriptor." + i, "UserDefined," + descriptors[i].getName());
     }
   }
 
+  public static JobConf createValueAggregatorJob(String args[],
+    Class<? extends ValueAggregatorDescriptor>[] descriptors,
+    Class<?> caller) throws IOException {
+    JobConf job = createValueAggregatorJob(args, caller);
+    setAggregatorDescriptors(job, descriptors);
+    return job;
+  }
+
   /**
    * create and run an Aggregate based map/reduce job.
    *
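For drivers that register their own aggregator descriptors, the new three-argument overload combines descriptor registration with the caller-based jar lookup. A hedged sketch (WordHistogram and TokenCountDescriptor are invented for illustration; LONG_VALUE_SUM and ONE are constants inherited from ValueAggregatorBaseDescriptor):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Map.Entry;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobClient;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorBaseDescriptor;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorDescriptor;
    import org.apache.hadoop.mapred.lib.aggregate.ValueAggregatorJob;

    public class WordHistogram {

      /** Emits one LongValueSum entry per whitespace-delimited token. */
      public static class TokenCountDescriptor extends ValueAggregatorBaseDescriptor {
        @Override
        public ArrayList<Entry<Text, Text>> generateKeyValPairs(Object key, Object val) {
          ArrayList<Entry<Text, Text>> pairs = new ArrayList<Entry<Text, Text>>();
          for (String word : val.toString().split("\\s+")) {
            pairs.add(generateEntry(LONG_VALUE_SUM, word, ONE));
          }
          return pairs;
        }
      }

      @SuppressWarnings("unchecked")
      public static void main(String[] args) throws IOException {
        Class<? extends ValueAggregatorDescriptor>[] descriptors =
            new Class[] { TokenCountDescriptor.class };
        // Descriptors are wired in via setAggregatorDescriptors(), while the
        // caller class routes setJarByClass() to this example's jar.
        JobConf job = ValueAggregatorJob.createValueAggregatorJob(
            args, descriptors, WordHistogram.class);
        JobClient.runJob(job);
      }
    }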