mirror of https://github.com/apache/druid.git
remove logging and use new determine partition job
This commit is contained in:
parent 7ca87d59df
commit 5fd76067cd
@@ -281,7 +281,6 @@ public class DeterminePartitionsUsingCardinalityJob implements Jobby
       }

       final Path outPath = config.makeSegmentPartitionInfoPath(new Bucket(0, new DateTime(key.get()), 0));
-      new Exception("creating output path" + outPath).printStackTrace();
       final OutputStream out = Utils.makePathAndOutputStream(
           context, outPath, config.isOverwriteFiles()
       );
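The deleted line is a common debugging hack: allocating a throwaway Exception purely to dump a stack trace to stderr. For context, a minimal sketch of the idiomatic alternative, using java.util.logging as a stand-in (the diff does not show which logger class Druid itself uses, so the logger choice here is an assumption):

import java.util.logging.Logger;

public class OutputPathLoggingSketch
{
  private static final Logger log = Logger.getLogger(OutputPathLoggingSketch.class.getName());

  public static void main(String[] args)
  {
    // Stand-in for config.makeSegmentPartitionInfoPath(...); the real value is a Hadoop Path.
    String outPath = "/tmp/segmentPartitionInfo";

    // The removed hack: new Exception("creating output path" + outPath).printStackTrace();
    // It builds an Exception solely for its stack trace and writes to stderr unconditionally.

    // A plain logger call records the same fact without stack-trace noise,
    // and can be filtered by log level in production.
    log.info("creating output path " + outPath);
  }
}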
@@ -388,7 +388,7 @@ public class HadoopDruidIndexerConfig
     return partitionsSpec.getPartitionDimension();
   }

-  public boolean partitionByDimension()
+  public boolean isDeterminingPartitions()
   {
     return partitionsSpec.isDeterminingPartitions();
   }
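The rename tracks a semantic change: partitionByDimension() implied that partition determination always keys off an explicitly named dimension, while isDeterminingPartitions() also covers the new case where no dimension is named and one is chosen by cardinality. A minimal sketch of the call-site contract, with the config reduced to an assumed stub that models only the two accessors visible in this diff:

public class RenameSketch
{
  // Assumed minimal shape of the indexer config; not Druid's actual class.
  static class IndexerConfigStub
  {
    private final String partitionDimension;
    private final boolean determiningPartitions;

    IndexerConfigStub(String partitionDimension, boolean determiningPartitions)
    {
      this.partitionDimension = partitionDimension;
      this.determiningPartitions = determiningPartitions;
    }

    public String getPartitionDimension() { return partitionDimension; }

    public boolean isDeterminingPartitions() { return determiningPartitions; }
  }

  public static void main(String[] args)
  {
    // A config with no named dimension can still require partition determination,
    // which is exactly the case the old name partitionByDimension() obscured.
    IndexerConfigStub config = new IndexerConfigStub(null, true);
    System.out.println("determining=" + config.isDeterminingPartitions() + ", dim=" + config.getPartitionDimension());
  }
}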
@@ -72,8 +72,12 @@ public class HadoopDruidIndexerJob implements Jobby

     ensurePaths();

-    if (config.partitionByDimension()) {
-      jobs.add(new DeterminePartitionsJob(config));
+    if (config.isDeterminingPartitions()) {
+      if (config.getPartitionDimension() == null) {
+        jobs.add(new DeterminePartitionsUsingCardinalityJob(config));
+      } else {
+        jobs.add(new DeterminePartitionsJob(config));
+      }
     }
     else {
       Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
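The new dispatch reads: if partitions must be determined at all, pick the job by whether a partition dimension was named. A self-contained sketch of that selection logic, with Jobby and both jobs reduced to stand-ins (the real constructors take a HadoopDruidIndexerConfig, elided here):

import java.util.ArrayList;
import java.util.List;

public class JobSelectionSketch
{
  interface Jobby { boolean run(); }

  // Stand-ins for the real jobs; they only announce which strategy would run.
  static class DeterminePartitionsJob implements Jobby
  {
    @Override public boolean run() { System.out.println("partition on the named dimension"); return true; }
  }

  static class DeterminePartitionsUsingCardinalityJob implements Jobby
  {
    @Override public boolean run() { System.out.println("pick a dimension by cardinality"); return true; }
  }

  static List<Jobby> selectJobs(boolean determiningPartitions, String partitionDimension)
  {
    List<Jobby> jobs = new ArrayList<>();
    if (determiningPartitions) {
      if (partitionDimension == null) {
        jobs.add(new DeterminePartitionsUsingCardinalityJob());
      } else {
        jobs.add(new DeterminePartitionsJob());
      }
    }
    // else: shard specs come straight from the config (the Maps.newTreeMap branch above)
    return jobs;
  }

  public static void main(String[] args)
  {
    for (Jobby job : selectJobs(true, null)) {
      job.run(); // prints "pick a dimension by cardinality"
    }
  }
}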