mirror of https://github.com/apache/druid.git
Merge pull request #1275 from infynyxx/update_deprecated_job_initialization
use Job.getInstance() to fix deprecated warnings
commit 700dc6fbc0
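The change is the same in every file touched: the Hadoop `Job` constructors that take a `Configuration` are deprecated in Hadoop 2.x, and the static factory `Job.getInstance(...)` is the supported replacement. A minimal before/after sketch of that pattern (the job name "example-job" is a placeholder, not one of Druid's actual job names):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class JobFactoryExample
{
  public static Job createJob() throws IOException
  {
    // Deprecated since Hadoop 2.x:
    // Job job = new Job(new Configuration(), "example-job");

    // Preferred factory method; it copies the supplied Configuration
    // instead of keeping a reference to it.
    return Job.getInstance(new Configuration(), "example-job");
  }
}

Nothing else about how these jobs are configured changes; the switch just moves off the deprecated constructors, which is what the commit message describes.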
@@ -46,8 +46,6 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.joda.time.DateTime;
@@ -84,7 +82,7 @@ public class DetermineHashedPartitionsJob implements Jobby
      * in the final segment.
      */
     long startTime = System.currentTimeMillis();
-    final Job groupByJob = new Job(
+    final Job groupByJob = Job.getInstance(
         new Configuration(),
         String.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals())
     );
@@ -121,7 +121,7 @@ public class DeterminePartitionsJob implements Jobby
     }

     if (!config.getPartitionsSpec().isAssumeGrouped()) {
-      final Job groupByJob = new Job(
+      final Job groupByJob = Job.getInstance(
           new Configuration(),
           String.format("%s-determine_partitions_groupby-%s", config.getDataSource(), config.getIntervals())
       );
@@ -157,7 +157,7 @@ public class DeterminePartitionsJob implements Jobby
     /*
      * Read grouped data and determine appropriate partitions.
      */
-    final Job dimSelectionJob = new Job(
+    final Job dimSelectionJob = Job.getInstance(
         new Configuration(),
         String.format("%s-determine_partitions_dimselection-%s", config.getDataSource(), config.getIntervals())
     );
@@ -152,7 +152,7 @@ public class IndexGeneratorJob implements Jobby
   public boolean run()
   {
     try {
-      Job job = new Job(
+      Job job = Job.getInstance(
           new Configuration(),
           String.format("%s-index-generator-%s", config.getDataSource(), config.getIntervals())
       );
@@ -112,7 +112,7 @@ public class JobHelper
   {
     // config.addInputPaths() can have side-effects ( boo! :( ), so this stuff needs to be done before anything else
     try {
-      Job job = new Job(
+      Job job = Job.getInstance(
           new Configuration(),
           String.format("%s-determine_partitions-%s", config.getDataSource(), config.getIntervals())
       );
@@ -60,7 +60,7 @@ public class StaticPathSpecTest
     StaticPathSpec pathSpec = (StaticPathSpec)jsonMapper.readValue(sb.toString(), PathSpec.class);
     Assert.assertEquals(inputFormat, pathSpec.getInputFormat());

-    Job job = new Job();
+    Job job = Job.getInstance();
     pathSpec.addInputPaths(null, job);
     Assert.assertEquals(
         "file:" + path,
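The test hunk above uses the no-argument overload, which simply wraps a fresh default `Configuration`. A rough sketch of how that variant behaves, using a made-up input path rather than the one from StaticPathSpecTest:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class NoArgJobExample
{
  public static void main(String[] args) throws Exception
  {
    // Job.getInstance() with no arguments builds the Job around a new, default Configuration.
    Job job = Job.getInstance();

    // Input paths registered through the mapreduce API land in that Configuration...
    FileInputFormat.addInputPath(job, new Path("/tmp/static-path-spec-example"));

    // ...and can be read back from it, which is roughly what the assertion in the test checks.
    for (Path p : FileInputFormat.getInputPaths(job)) {
      System.out.println(p);
    }
  }
}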