Remove debug logging and use the new determine-partitions job

This commit is contained in:
nishantmonu51 2014-01-31 13:51:38 +05:30
parent 7ca87d59df
commit 5fd76067cd
3 changed files with 7 additions and 4 deletions

View File

@ -281,7 +281,6 @@ public class DeterminePartitionsUsingCardinalityJob implements Jobby
}
final Path outPath = config.makeSegmentPartitionInfoPath(new Bucket(0, new DateTime(key.get()), 0));
new Exception("creating output path" + outPath).printStackTrace();
final OutputStream out = Utils.makePathAndOutputStream(
context, outPath, config.isOverwriteFiles()
);

View File

@ -388,7 +388,7 @@ public class HadoopDruidIndexerConfig
return partitionsSpec.getPartitionDimension();
}
public boolean partitionByDimension()
public boolean isDeterminingPartitions()
{
return partitionsSpec.isDeterminingPartitions();
}

View File

@ -72,8 +72,12 @@ public class HadoopDruidIndexerJob implements Jobby
ensurePaths();
if (config.partitionByDimension()) {
jobs.add(new DeterminePartitionsJob(config));
if (config.isDeterminingPartitions()) {
if(config.getPartitionDimension() == null){
jobs.add(new DeterminePartitionsUsingCardinalityJob(config));
} else {
jobs.add(new DeterminePartitionsJob(config));
}
}
else {
Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());