MAPREDUCE-5800. Use Job#getInstance instead of deprecated constructors. (aajisaka)

Akira Ajisaka 2015-02-03 14:30:09 -08:00
parent c559df2219
commit bd69fb2d44
39 changed files with 49 additions and 49 deletions


@@ -271,6 +271,9 @@ Release 2.7.0 - UNRELEASED
     MAPREDUCE-6143. add configuration for mapreduce speculative execution in
     MR2 (zxu via rkanter)
 
+    MAPREDUCE-5800. Use Job#getInstance instead of deprecated constructors
+    (aajisaka)
+
   OPTIMIZATIONS
 
     MAPREDUCE-6169. MergeQueue should release reference to the current item


@@ -51,7 +51,7 @@ public class Job extends ControlledJob {
    */
   @SuppressWarnings("unchecked")
   public Job(JobConf jobConf, ArrayList<?> dependingJobs) throws IOException {
-    super(new org.apache.hadoop.mapreduce.Job(jobConf),
+    super(org.apache.hadoop.mapreduce.Job.getInstance(jobConf),
           (List<ControlledJob>) dependingJobs);
   }
 
@@ -93,7 +93,7 @@ public class Job extends ControlledJob {
    */
   public synchronized void setJobConf(JobConf jobConf) {
     try {
-      super.setJob(new org.apache.hadoop.mapreduce.Job(jobConf));
+      super.setJob(org.apache.hadoop.mapreduce.Job.getInstance(jobConf));
     } catch (IOException ioe) {
       LOG.info("Exception" + ioe);
     }
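
Every file in this commit gets the same mechanical treatment: a call to one of the deprecated org.apache.hadoop.mapreduce.Job constructors is replaced by the equivalent static Job.getInstance factory. A minimal before/after sketch of the pattern (a hypothetical driver class, not code from this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    public class MigrationSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Before: compiles, but javac flags the deprecated constructor.
        //   Job job = new Job(conf, "my-job");

        // After: the non-deprecated factory method, equivalent behavior.
        Job job = Job.getInstance(conf, "my-job");
        System.out.println(job.getJobName());
      }
    }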


@@ -73,7 +73,7 @@ public abstract class CombineFileInputFormat<K, V>
   public InputSplit[] getSplits(JobConf job, int numSplits)
     throws IOException {
     List<org.apache.hadoop.mapreduce.InputSplit> newStyleSplits =
-      super.getSplits(new Job(job));
+      super.getSplits(Job.getInstance(job));
     InputSplit[] ret = new InputSplit[newStyleSplits.size()];
     for(int pos = 0; pos < newStyleSplits.size(); ++pos) {
       org.apache.hadoop.mapreduce.lib.input.CombineFileSplit newStyleSplit =
@@ -129,7 +129,7 @@ public abstract class CombineFileInputFormat<K, V>
    * @throws IOException if zero items.
    */
   protected FileStatus[] listStatus(JobConf job) throws IOException {
-    List<FileStatus> result = super.listStatus(new Job(job));
+    List<FileStatus> result = super.listStatus(Job.getInstance(job));
     return result.toArray(new FileStatus[result.size()]);
   }
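
The mapred.* hunks here follow a bridging idiom: an old-API class wraps its JobConf in a new-API Job so it can delegate to new-API superclass methods. This works because JobConf extends Configuration and is therefore accepted by Job.getInstance(Configuration). A minimal sketch of the idiom (class and method names are illustrative, not the actual Hadoop sources):

    import java.io.IOException;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.Job;

    class OldApiBridge {
      // Wrap an old-API JobConf in a new-API Job so that new-API helpers
      // (e.g. getSplits(JobContext)) can be reused from old-API entry points.
      static Job toNewApi(JobConf jobConf) throws IOException {
        return Job.getInstance(jobConf); // JobConf IS-A Configuration
      }
    }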


@@ -46,7 +46,7 @@ public class InputSampler<K,V> extends
   public static <K,V> void writePartitionFile(JobConf job, Sampler<K,V> sampler)
       throws IOException, ClassNotFoundException, InterruptedException {
-    writePartitionFile(new Job(job), sampler);
+    writePartitionFile(Job.getInstance(job), sampler);
   }
 
   /**
    * Interface to sample using an {@link org.apache.hadoop.mapred.InputFormat}.


@@ -177,7 +177,7 @@ public class DBInputFormat<T extends DBWritable>
   /** {@inheritDoc} */
   public InputSplit[] getSplits(JobConf job, int chunks) throws IOException {
     List<org.apache.hadoop.mapreduce.InputSplit> newSplits =
-      super.getSplits(new Job(job));
+      super.getSplits(Job.getInstance(job));
     InputSplit[] ret = new InputSplit[newSplits.size()];
     int i = 0;
     for (org.apache.hadoop.mapreduce.InputSplit s : newSplits) {


@@ -120,7 +120,7 @@ public class Job extends JobContextImpl implements JobContext {
    */
   @Deprecated
   public Job() throws IOException {
-    this(new Configuration());
+    this(new JobConf(new Configuration()));
   }
 
   /**
@@ -136,7 +136,7 @@ public class Job extends JobContextImpl implements JobContext {
    */
   @Deprecated
   public Job(Configuration conf, String jobName) throws IOException {
-    this(conf);
+    this(new JobConf(conf));
     setJobName(jobName);
   }
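
This hunk differs from the rest: the deprecated constructors cannot call getInstance themselves (a constructor can only chain to another constructor), so they now wrap the Configuration in a JobConf and delegate to the JobConf-taking constructor, instead of chaining through the equally deprecated Job(Configuration) overload. For context, Job.getInstance(Configuration) in this line of Hadoop boils down to roughly the following (a simplified sketch, not the verbatim source):

    public static Job getInstance(Configuration conf) throws IOException {
      // Copy conf into a fresh JobConf and hand it to the non-deprecated
      // JobConf-taking constructor -- the same path the constructors
      // above now take directly.
      JobConf jobConf = new JobConf(conf);
      return new Job(jobConf);
    }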


@@ -164,7 +164,7 @@ public class ValueAggregatorJob {
       conf.set(MRJobConfig.JAR, userJarFile);
     }
 
-    Job theJob = new Job(conf);
+    Job theJob = Job.getInstance(conf);
     if (userJarFile == null) {
       theJob.setJarByClass(ValueAggregator.class);
     }


@@ -53,7 +53,7 @@ public class DelegatingInputFormat<K, V> extends InputFormat<K, V> {
   public List<InputSplit> getSplits(JobContext job)
       throws IOException, InterruptedException {
     Configuration conf = job.getConfiguration();
-    Job jobCopy =new Job(conf);
+    Job jobCopy = Job.getInstance(conf);
     List<InputSplit> splits = new ArrayList<InputSplit>();
     Map<Path, InputFormat> formatMap =
       MultipleInputs.getInputFormatMap(job);


@@ -84,7 +84,7 @@ public class ControlledJob {
    * @throws IOException
    */
   public ControlledJob(Configuration conf) throws IOException {
-    this(new Job(conf), null);
+    this(Job.getInstance(conf), null);
   }
 
   @Override


@@ -323,7 +323,7 @@ public abstract static class Node extends ComposableInputFormat {
     }
 
     private Configuration getConf(Configuration jconf) throws IOException {
-      Job job = new Job(jconf);
+      Job job = Job.getInstance(jconf);
       FileInputFormat.setInputPaths(job, indir);
       return job.getConfiguration();
     }


@@ -503,7 +503,7 @@ public class MultipleOutputs<KEYOUT, VALUEOUT> {
 
     // The following trick leverages the instantiation of a record writer via
     // the job thus supporting arbitrary output formats.
-    Job job = new Job(context.getConfiguration());
+    Job job = Job.getInstance(context.getConfiguration());
     job.setOutputFormatClass(getNamedOutputFormatClass(context, nameOutput));
     job.setOutputKeyClass(getNamedOutputKeyClass(context, nameOutput));
     job.setOutputValueClass(getNamedOutputValueClass(context, nameOutput));


@@ -348,7 +348,7 @@ public class InputSampler<K,V> extends Configured implements Tool {
    * Configures a JobConf instance and calls {@link #writePartitionFile}.
    */
   public int run(String[] args) throws Exception {
-    Job job = new Job(getConf());
+    Job job = Job.getInstance(getConf());
     ArrayList<String> otherArgs = new ArrayList<String>();
     Sampler<K,V> sampler = null;
     for(int i=0; i < args.length; ++i) {


@@ -83,7 +83,7 @@ public class TotalOrderPartitioner<K,V>
           ? FileSystem.getLocal(conf) // assume in DistributedCache
           : partFile.getFileSystem(conf);
 
-      Job job = new Job(conf);
+      Job job = Job.getInstance(conf);
       Class<K> keyClass = (Class<K>)job.getMapOutputKeyClass();
       K[] splitPoints = readPartitions(fs, partFile, keyClass, conf);
       if (splitPoints.length != job.getNumReduceTasks() - 1) {
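
A detail worth knowing at call sites like this one, where the Job only serves as a typed view over an existing Configuration: Job.getInstance(conf) copies conf into a fresh JobConf (just as the old constructor did), so the caller's Configuration is not mutated through the wrapper. A small sketch of that copy behavior (assumed semantics of the Configuration copy, with a made-up key):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    class CopySemantics {
      static void demo() throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // The job holds its own copy; the original conf is unchanged.
        // (Run with -ea to enable the assert.)
        job.getConfiguration().set("example.key", "changed");
        assert conf.get("example.key") == null;
      }
    }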


@@ -65,7 +65,7 @@ public class RandomTextWriterJob extends Configured implements Tool {
     }
 
     conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(RandomTextWriterJob.class);
     job.setJobName("random-text-writer");


@@ -185,7 +185,7 @@ public class TestMiniMRChildTask {
 
     // Launch job with default option for temp dir.
     // i.e. temp dir is ./tmp
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.addFileToClassPath(APP_JAR);
     job.setJarByClass(TestMiniMRChildTask.class);
     job.setMaxMapAttempts(1); // speed up failures
@@ -537,7 +537,7 @@ public class TestMiniMRChildTask {
     conf.set(mapTaskJavaOptsKey, mapTaskJavaOpts);
     conf.set(reduceTaskJavaOptsKey, reduceTaskJavaOpts);
 
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.addFileToClassPath(APP_JAR);
     job.setJarByClass(TestMiniMRChildTask.class);
     job.setMaxMapAttempts(1); // speed up failures


@@ -187,7 +187,7 @@ public class TestMiniMRClientCluster {
   }
 
   public static Job createJob() throws IOException {
-    final Job baseJob = new Job(mrCluster.getConfig());
+    final Job baseJob = Job.getInstance(mrCluster.getConfig());
     baseJob.setOutputKeyClass(Text.class);
     baseJob.setOutputValueClass(IntWritable.class);
     baseJob.setMapperClass(MyMapper.class);


@@ -231,8 +231,7 @@ public class LargeSorter extends Configured implements Tool {
     conf.setInt(MRJobConfig.MAP_MEMORY_MB, mapMb);
     conf.set(MRJobConfig.MAP_JAVA_OPTS, "-Xmx" + (mapMb - 200) + "m");
 
-    @SuppressWarnings("deprecation")
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(LargeSorter.class);
     job.setJobName("large-sorter");
     FileOutputFormat.setOutputPath(job, outDir);
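
This hunk, like the WordMean/WordMedian/WordStandardDeviation hunks below, shows a second payoff of the migration: the @SuppressWarnings("deprecation") annotations that existed only to silence the constructor warning are deleted outright. A sketch of the pattern (illustrative, mirroring the hunk above):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    class NoSuppressionNeeded {
      static Job create(Configuration conf) throws IOException {
        // Before, the deprecated constructor forced either a javac warning
        // or an @SuppressWarnings("deprecation") on the declaration:
        //   @SuppressWarnings("deprecation")
        //   Job job = new Job(conf);
        // The factory method is not deprecated, so neither is needed.
        return Job.getInstance(conf);
      }
    }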


@@ -195,7 +195,7 @@ public class RandomTextWriter extends Configured implements Tool {
     }
 
     conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(RandomTextWriter.class);
     job.setJobName("random-text-writer");


@@ -261,7 +261,7 @@ public class RandomWriter extends Configured implements Tool {
     }
 
     conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(RandomWriter.class);
     job.setJobName("random-writer");


@@ -547,7 +547,7 @@ public class TestMRJobs {
     myConf.setInt(MRJobConfig.NUM_MAPS, 1);
     myConf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, 2); //reduce the number of attempts
 
-    Job job = new Job(myConf);
+    Job job = Job.getInstance(myConf);
     job.setJarByClass(FailingMapper.class);
     job.setJobName("failmapper");


@@ -77,7 +77,7 @@ public class UserNamePermission
   {
     Path outDir = new Path("output");
     Configuration conf = new Configuration();
-    Job job = new Job(conf, "user name check");
+    Job job = Job.getInstance(conf, "user name check");
     job.setJarByClass(UserNamePermission.class);


@@ -315,7 +315,7 @@ public class BaileyBorweinPlouffe extends Configured implements Tool {
 
   /** Create and setup a job */
   private static Job createJob(String name, Configuration conf
       ) throws IOException {
-    final Job job = new Job(conf, NAME + "_" + name);
+    final Job job = Job.getInstance(conf, NAME + "_" + name);
     final Configuration jobconf = job.getConfiguration();
     job.setJarByClass(BaileyBorweinPlouffe.class);


@@ -56,7 +56,7 @@ public class Grep extends Configured implements Tool {
     if (args.length == 4)
       conf.set(RegexMapper.GROUP, args[3]);
 
-    Job grepJob = new Job(conf);
+    Job grepJob = Job.getInstance(conf);
 
     try {
 
@@ -77,7 +77,7 @@ public class Grep extends Configured implements Tool {
       grepJob.waitForCompletion(true);
 
-      Job sortJob = new Job(conf);
+      Job sortJob = Job.getInstance(conf);
       sortJob.setJobName("grep-sort");
       sortJob.setJarByClass(Grep.class);


@@ -89,7 +89,7 @@ public class Join extends Configured implements Tool {
       num_reduces = cluster.getTaskTrackers() *
                       Integer.parseInt(join_reduces);
     }
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJobName("join");
     job.setJarByClass(Sort.class);


@@ -229,7 +229,7 @@ public class MultiFileWordCount extends Configured implements Tool {
       return 2;
     }
 
-    Job job = new Job(getConf());
+    Job job = Job.getInstance(getConf());
     job.setJobName("MultiFileWordCount");
     job.setJarByClass(MultiFileWordCount.class);


@@ -248,7 +248,7 @@ public class QuasiMonteCarlo extends Configured implements Tool {
   public static BigDecimal estimatePi(int numMaps, long numPoints,
       Path tmpDir, Configuration conf
       ) throws IOException, ClassNotFoundException, InterruptedException {
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     //setup job conf
     job.setJobName(QuasiMonteCarlo.class.getSimpleName());
     job.setJarByClass(QuasiMonteCarlo.class);


@@ -195,7 +195,7 @@ public class RandomTextWriter extends Configured implements Tool {
     }
 
     conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(RandomTextWriter.class);
     job.setJobName("random-text-writer");


@@ -261,7 +261,7 @@ public class RandomWriter extends Configured implements Tool {
     }
 
     conf.setInt(MRJobConfig.NUM_MAPS, numMaps);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     job.setJarByClass(RandomWriter.class);
     job.setJobName("random-writer");


@@ -214,7 +214,7 @@ public class SecondarySort {
       System.err.println("Usage: secondarysort <in> <out>");
       System.exit(2);
     }
-    Job job = new Job(conf, "secondary sort");
+    Job job = Job.getInstance(conf, "secondary sort");
     job.setJarByClass(SecondarySort.class);
     job.setMapperClass(MapClass.class);
     job.setReducerClass(Reduce.class);


@@ -132,7 +132,7 @@ public class Sort<K,V> extends Configured implements Tool {
       }
     }
     // Set user-supplied (possibly default) job configs
-    job = new Job(conf);
+    job = Job.getInstance(conf);
     job.setJobName("sorter");
     job.setJarByClass(Sort.class);


@@ -72,7 +72,7 @@ public class WordCount {
       System.err.println("Usage: wordcount <in> [<in>...] <out>");
       System.exit(2);
     }
-    Job job = new Job(conf, "word count");
+    Job job = Job.getInstance(conf, "word count");
     job.setJarByClass(WordCount.class);
     job.setMapperClass(TokenizerMapper.class);
     job.setCombinerClass(IntSumReducer.class);


@@ -172,8 +172,7 @@ public class WordMean extends Configured implements Tool {
 
     Configuration conf = getConf();
 
-    @SuppressWarnings("deprecation")
-    Job job = new Job(conf, "word mean");
+    Job job = Job.getInstance(conf, "word mean");
     job.setJarByClass(WordMean.class);
     job.setMapperClass(WordMeanMapper.class);
     job.setCombinerClass(WordMeanReducer.class);


@@ -181,8 +181,7 @@ public class WordMedian extends Configured implements Tool {
     setConf(new Configuration());
     Configuration conf = getConf();
 
-    @SuppressWarnings("deprecation")
-    Job job = new Job(conf, "word median");
+    Job job = Job.getInstance(conf, "word median");
     job.setJarByClass(WordMedian.class);
     job.setMapperClass(WordMedianMapper.class);
     job.setCombinerClass(WordMedianReducer.class);


@@ -189,8 +189,7 @@ public class WordStandardDeviation extends Configured implements Tool {
 
     Configuration conf = getConf();
 
-    @SuppressWarnings("deprecation")
-    Job job = new Job(conf, "word stddev");
+    Job job = Job.getInstance(conf, "word stddev");
     job.setJarByClass(WordStandardDeviation.class);
     job.setMapperClass(WordStandardDeviationMapper.class);
     job.setCombinerClass(WordStandardDeviationReducer.class);


@@ -198,7 +198,7 @@ public class DistributedPentomino extends Configured implements Tool {
     Path input = new Path(output + "_input");
     FileSystem fileSys = FileSystem.get(conf);
     try {
-      Job job = new Job(conf);
+      Job job = Job.getInstance(conf);
       FileInputFormat.setInputPaths(job, input);
       FileOutputFormat.setOutputPath(job, output);
       job.setJarByClass(PentMap.class);


@@ -432,7 +432,8 @@ public final class DistSum extends Configured implements Tool {
 
   /** Create a job */
   private Job createJob(String name, Summation sigma) throws IOException {
-    final Job job = new Job(getConf(), parameters.remoteDir + "/" + name);
+    final Job job = Job.getInstance(getConf(), parameters.remoteDir + "/" +
+        name);
     final Configuration jobconf = job.getConfiguration();
     job.setJarByClass(DistSum.class);
     jobconf.setInt(N_PARTS, parameters.nParts);


@@ -114,8 +114,8 @@ abstract class GridmixJob implements Callable<Job>, Delayed {
       String jobId = null == jobdesc.getJobID()
         ? "<unknown>"
        : jobdesc.getJobID().toString();
-      Job ret = new Job(conf,
-          nameFormat.get().format("%06d", seq).toString());
+      Job ret = Job.getInstance(conf, nameFormat.get().format("%06d", seq)
+          .toString());
       ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
       ret.getConfiguration().set(Gridmix.ORIGINAL_JOB_ID, jobId);
@@ -343,7 +343,7 @@ abstract class GridmixJob implements Callable<Job>, Delayed {
     try {
       job = this.ugi.doAs(new PrivilegedExceptionAction<Job>() {
         public Job run() throws IOException {
-          Job ret = new Job(conf, name);
+          Job ret = Job.getInstance(conf, name);
           ret.getConfiguration().setInt(GRIDMIX_JOB_SEQ, seq);
           setJobQueue(ret, conf.get(GRIDMIX_DEFAULT_QUEUE));
           return ret;


@@ -157,7 +157,7 @@ public class TestCompressionEmulationUtils {
 
     // get the local job runner
     conf.setInt(MRJobConfig.NUM_MAPS, 1);
-    Job job = new Job(conf);
+    Job job = Job.getInstance(conf);
     CompressionEmulationUtil.configure(job);
     job.setInputFormatClass(CustomInputFormat.class);


@@ -109,7 +109,7 @@ public class TestStreamXmlRecordReader {
 
   @Test
   public void testStreamXmlRecordReader() throws Exception {
-    Job job = new Job();
+    Job job = Job.getInstance();
     Configuration conf = job.getConfiguration();
     job.setJarByClass(TestStreamXmlRecordReader.class);
     job.setMapperClass(Mapper.class);
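
Taken together, the commit exercises three getInstance overloads, matching the three deprecated constructor forms it retires. A summary sketch (illustrative class, overload set as found in Hadoop 2.x):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Job;

    class GetInstanceVariants {
      static void variants(Configuration conf) throws Exception {
        Job a = Job.getInstance();                 // replaces new Job(), as in TestStreamXmlRecordReader
        Job b = Job.getInstance(conf);             // replaces new Job(conf)
        Job c = Job.getInstance(conf, "job-name"); // replaces new Job(conf, "job-name")
      }
    }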