workaround to pass down bitmap type to map-reduce tasks

Xavier Léauté 2015-01-02 17:26:59 -08:00
parent f17c59e21b
commit f1375b0bfb
2 changed files with 21 additions and 0 deletions

HadoopDruidIndexerConfig.java

@@ -67,6 +67,7 @@ import java.io.IOException;
import java.nio.charset.Charset;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
@@ -83,6 +84,9 @@ public class HadoopDruidIndexerConfig
public static final Joiner tabJoiner = Joiner.on("\t");
public static final ObjectMapper jsonMapper;
// workaround to pass down druid.processing.bitmap.type, see IndexGeneratorJob.run()
protected static final Properties properties;
private static final String DEFAULT_WORKING_PATH = "/tmp/druid-indexing";
static {
@@ -102,6 +106,7 @@ public class HadoopDruidIndexerConfig
)
);
jsonMapper = injector.getInstance(ObjectMapper.class);
properties = injector.getInstance(Properties.class);
}
public static enum IndexJobCounters

IndexGeneratorJob.java

@@ -21,6 +21,7 @@ package io.druid.indexer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
@@ -44,6 +45,8 @@ import io.druid.segment.LoggingProgressIndicator;
import io.druid.segment.ProgressIndicator;
import io.druid.segment.QueryableIndex;
import io.druid.segment.SegmentUtils;
import io.druid.segment.data.BitmapSerde;
import io.druid.segment.data.BitmapSerdeFactory;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.OffheapIncrementalIndex;
@@ -180,6 +183,19 @@ public class IndexGeneratorJob implements Jobby
config.addInputPaths(job);
config.addJobProperties(job);
// hack to get druid.processing.bitmap property passed down to hadoop job.
// once IndexIO doesn't rely on globally injected properties, we can move this into the HadoopTuningConfig.
final String bitmapProperty = "druid.processing.bitmap.type";
final String bitmapType = HadoopDruidIndexerConfig.properties.getProperty(bitmapProperty);
if(bitmapType != null) {
for(String property : new String[] {"mapreduce.reduce.java.opts", "mapreduce.map.java.opts"}) {
// prepend property to allow overriding using hadoop.xxx properties by JobHelper.injectSystemProperties above
String value = Strings.nullToEmpty(job.getConfiguration().get(property));
job.getConfiguration().set(property, String.format("-D%s=%s %s", bitmapProperty, bitmapType, value));
}
}
config.intoConfiguration(job);
JobHelper.setupClasspath(config, job);
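
How the workaround plays out at runtime: the bitmap type configured on the JVM that launches the indexer is turned into a -D flag and prepended to the task JVM options, so any mapreduce.*.java.opts value a user supplies through hadoop.xxx overrides still wins, because its flags come later on the task command line and the JVM keeps the last occurrence of a duplicated -D property. Below is a minimal, self-contained sketch of that prepend step. The property names and the "-D<key>=<value> <existing opts>" format come from the committed code; the class name, the use of a plain java.util.Map in place of the Hadoop job Configuration, and the sample values are purely illustrative.

    import java.util.HashMap;
    import java.util.Map;

    // Sketch only: a HashMap stands in for org.apache.hadoop.conf.Configuration,
    // and the sample opts/values below are hypothetical.
    public class BitmapTypePassThroughSketch
    {
      public static void main(String[] args)
      {
        Map<String, String> conf = new HashMap<>();
        // opts a user may already have injected via a hadoop.mapreduce.map.java.opts override
        conf.put("mapreduce.map.java.opts", "-Xmx1g -Ddruid.processing.bitmap.type=concise");

        // stand-in for the value read from HadoopDruidIndexerConfig.properties
        String bitmapType = "roaring";

        for (String property : new String[]{"mapreduce.reduce.java.opts", "mapreduce.map.java.opts"}) {
          String value = conf.get(property) == null ? "" : conf.get(property);
          // prepend, so a flag already present (e.g. from a hadoop.xxx override) appears
          // later on the task command line and therefore takes precedence
          conf.put(property, String.format("-Ddruid.processing.bitmap.type=%s %s", bitmapType, value));
        }

        // prints: -Ddruid.processing.bitmap.type=roaring -Xmx1g -Ddruid.processing.bitmap.type=concise
        // the task JVM keeps the last -D, i.e. the user-supplied "concise"
        System.out.println(conf.get("mapreduce.map.java.opts"));
      }
    }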