diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt index 6c22e1410c4..1f3a59c9e9a 100644 --- a/hadoop-mapreduce-project/CHANGES.txt +++ b/hadoop-mapreduce-project/CHANGES.txt @@ -59,6 +59,9 @@ Release 2.0.5-beta - UNRELEASED MAPREDUCE-5036. Default shuffle handler port should not be 8080. (Sandy Ryza via tomwhite) + MAPREDUCE-5159. Change ValueAggregatorJob to add APIs which can support + binary compatibility with hadoop-1 examples. (Zhijie Shen via vinodkv) + OPTIMIZATIONS MAPREDUCE-4974. Optimising the LineRecordReader initialize() method diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java index f4755f102ff..8c20723b711 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java @@ -102,15 +102,17 @@ public static JobControl createValueAggregatorJobs(String args[]) throws IOExcep /** * Create an Aggregate based map/reduce job. - * + * * @param args the arguments used for job creation. Generic hadoop * arguments are accepted. + * @param caller the caller class. + * @return a JobConf object ready for submission. 
- * + * * @throws IOException * @see GenericOptionsParser */ - public static JobConf createValueAggregatorJob(String args[]) + @SuppressWarnings("rawtypes") + public static JobConf createValueAggregatorJob(String args[], Class caller) throws IOException { Configuration conf = new Configuration(); @@ -159,7 +161,7 @@ public static JobConf createValueAggregatorJob(String args[]) } String userJarFile = theJob.get("user.jar.file"); if (userJarFile == null) { - theJob.setJarByClass(ValueAggregator.class); + theJob.setJarByClass(caller != null ? caller : ValueAggregatorJob.class); } else { theJob.setJar(userJarFile); } @@ -183,6 +185,21 @@ public static JobConf createValueAggregatorJob(String args[]) return theJob; } + /** + * Create an Aggregate based map/reduce job. + * + * @param args the arguments used for job creation. Generic hadoop + * arguments are accepted. + * @return a JobConf object ready for submission. + * + * @throws IOException + * @see GenericOptionsParser + */ + public static JobConf createValueAggregatorJob(String args[]) + throws IOException { + return createValueAggregatorJob(args, ValueAggregator.class); + } + public static JobConf createValueAggregatorJob(String args[] , Class[] descriptors) throws IOException { @@ -199,7 +216,15 @@ public static void setAggregatorDescriptors(JobConf job job.set("aggregator.descriptor." + i, "UserDefined," + descriptors[i].getName()); } } - + + public static JobConf createValueAggregatorJob(String args[], + Class[] descriptors, + Class caller) throws IOException { + JobConf job = createValueAggregatorJob(args, caller); + setAggregatorDescriptors(job, descriptors); + return job; + } + /** * create and run an Aggregate based map/reduce job. *