diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
index 07386469ae4..946725a884f 100644
--- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
+++ b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
@@ -26,6 +26,8 @@ import io.druid.timeline.DataSegment;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 
 public interface DataSegmentPusher
@@ -45,6 +47,15 @@ public interface DataSegmentPusher
     return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName);
   }
 
+  /**
+   * Property prefixes that should be added to the "allowedHadoopPrefix" config for passing down to Hadoop jobs.
+   * These should be property prefixes like "druid.xxx", which matches both "druid.xxx" itself and "druid.xxx.*".
+   */
+  default List<String> getAllowedPropertyPrefixesForHadoop()
+  {
+    return Collections.emptyList();
+  }
+
   // Note: storage directory structure format = .../dataSource/interval/version/partitionNumber/
   // If above format is ever changed, make sure to change it appropriately in other places
   // e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories
diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
index 0716c2d157e..97bc1a0167a 100644
--- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
+++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java
@@ -21,6 +21,7 @@ package io.druid.storage.azure;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import com.microsoft.azure.storage.StorageException;
@@ -37,6 +38,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
 
@@ -73,6 +75,12 @@ public class AzureDataSegmentPusher implements DataSegmentPusher
     return null;
   }
 
+  @Override
+  public List<String> getAllowedPropertyPrefixesForHadoop()
+  {
+    return ImmutableList.of("druid.azure");
+  }
+
   public File createSegmentDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment) throws IOException
   {
diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java
index 7f182ae9085..33be247a22a 100644
--- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java
+++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java
@@ -24,6 +24,7 @@ import com.google.api.client.http.InputStreamContent;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import io.druid.java.util.common.CompressionUtils;
@@ -38,6 +39,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.net.URI;
 import java.nio.file.Files;
+import java.util.List;
 import java.util.Map;
 
 public class GoogleDataSegmentPusher implements DataSegmentPusher
@@ -75,6 +77,12 @@ public class GoogleDataSegmentPusher implements DataSegmentPusher
     return StringUtils.format("gs://%s/%s", config.getBucket(), config.getPrefix());
   }
 
+  @Override
+  public List<String> getAllowedPropertyPrefixesForHadoop()
+  {
+    return ImmutableList.of("druid.google");
+  }
+
   public File createDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment) throws IOException
   {
diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
index ac79fa7cbec..1100287cb96 100644
--- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
+++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java
@@ -21,6 +21,7 @@ package io.druid.storage.s3;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
 import com.metamx.emitter.EmittingLogger;
@@ -38,6 +39,7 @@ import java.io.File;
 import java.io.IOException;
 import java.net.URI;
 import java.nio.file.Files;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
 
@@ -79,6 +81,12 @@ public class S3DataSegmentPusher implements DataSegmentPusher
     return getPathForHadoop();
   }
 
+  @Override
+  public List<String> getAllowedPropertyPrefixesForHadoop()
+  {
+    return ImmutableList.of("druid.s3");
+  }
+
   @Override
   public DataSegment push(final File indexFilesDir, final DataSegment inSegment) throws IOException
   {
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java
index 851ecd68ee8..3c13c62a4de 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java
@@ -74,6 +74,7 @@ import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -259,7 +260,13 @@ public class HadoopDruidIndexerConfig
     }
 
     this.rollupGran = spec.getDataSchema().getGranularitySpec().getQueryGranularity();
-    this.allowedHadoopPrefix = spec.getTuningConfig().getAllowedHadoopPrefix();
+
+    // User-specified list plus our additional bonus list.
+    this.allowedHadoopPrefix = new ArrayList<>();
+    this.allowedHadoopPrefix.add("druid.storage");
+    this.allowedHadoopPrefix.add("druid.javascript");
+    this.allowedHadoopPrefix.addAll(DATA_SEGMENT_PUSHER.getAllowedPropertyPrefixesForHadoop());
+    this.allowedHadoopPrefix.addAll(spec.getTuningConfig().getUserAllowedHadoopPrefix());
   }
 
   @JsonProperty(value = "spec")
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java
index de5c9156ed3..100da12068c 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java
@@ -137,9 +137,7 @@ public class HadoopTuningConfig implements TuningConfig
     this.forceExtendableShardSpecs = forceExtendableShardSpecs;
     Preconditions.checkArgument(this.numBackgroundPersistThreads >= 0, "Not support persistBackgroundCount < 0");
     this.useExplicitVersion = useExplicitVersion;
-    this.allowedHadoopPrefix = allowedHadoopPrefix == null
-                               ? ImmutableList.of("druid.storage.", "druid.javascript.")
-                               : allowedHadoopPrefix;
+    this.allowedHadoopPrefix = allowedHadoopPrefix == null ? ImmutableList.of() : allowedHadoopPrefix;
   }
 
   @JsonProperty
@@ -323,9 +321,10 @@ public class HadoopTuningConfig implements TuningConfig
     );
   }
 
-  @JsonProperty
-  public List<String> getAllowedHadoopPrefix()
+  @JsonProperty("allowedHadoopPrefix")
+  public List<String> getUserAllowedHadoopPrefix()
   {
+    // Just the user-specified list. More are added in HadoopDruidIndexerConfig.
     return allowedHadoopPrefix;
   }
 }
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
index 0e9f0eb25d4..d03fe632f19 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java
@@ -313,7 +313,7 @@ public class JobHelper
 
     for (String propName : System.getProperties().stringPropertyNames()) {
       for (String prefix : listOfAllowedPrefix) {
-        if (propName.startsWith(prefix)) {
+        if (propName.equals(prefix) || propName.startsWith(prefix + ".")) {
           mapJavaOpts = StringUtils.format("%s -D%s=%s", mapJavaOpts, propName, System.getProperty(propName));
           reduceJavaOpts = StringUtils.format("%s -D%s=%s", reduceJavaOpts, propName, System.getProperty(propName));
           break;
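
Note: the following standalone sketch is not part of the patch; it illustrates the behavior the patch introduces. The helper method, class name, and sample property values are hypothetical. matches() mirrors the new check in JobHelper, and the assembled list mirrors how HadoopDruidIndexerConfig combines the built-in prefixes, the pusher-provided prefixes, and the user's "allowedHadoopPrefix" tuning config.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class AllowedPrefixDemo
    {
      // Mirrors the new JobHelper check: a system property is forwarded when it
      // equals an allowed prefix exactly, or is a dotted child of it. So "druid.s3"
      // matches "druid.s3" and "druid.s3.accessKey", but not "druid.s3x".
      static boolean matches(String propName, String prefix)
      {
        return propName.equals(prefix) || propName.startsWith(prefix + ".");
      }

      public static void main(String[] args)
      {
        // Mirrors HadoopDruidIndexerConfig: built-in prefixes, plus a prefix from
        // the pusher (here "druid.s3", as S3DataSegmentPusher returns), plus a
        // hypothetical user-specified entry from "allowedHadoopPrefix".
        List<String> allowedPrefixes = new ArrayList<>(Arrays.asList("druid.storage", "druid.javascript"));
        allowedPrefixes.add("druid.s3");          // from the pusher
        allowedPrefixes.add("druid.myExtension"); // hypothetical user entry

        // Hypothetical property names, for illustration only.
        List<String> propNames = Arrays.asList(
            "druid.s3",           // forwarded: exact match
            "druid.s3.accessKey", // forwarded: dotted child
            "druid.s3x.secret",   // NOT forwarded under the new rule
            "druid.storage.type", // forwarded
            "java.io.tmpdir"      // never forwarded
        );

        for (String propName : propNames) {
          boolean forwarded = allowedPrefixes.stream().anyMatch(prefix -> matches(propName, prefix));
          System.out.println(propName + " -> " + (forwarded ? "forwarded" : "skipped"));
        }
      }
    }

Under the old startsWith(prefix) check, a prefix like "druid.s3" would also have forwarded unrelated properties such as "druid.s3x.secret", and a dot-terminated prefix like "druid.storage." would never have forwarded the bare "druid.storage" property itself. Requiring an exact match or a prefix-plus-dot boundary closes both gaps, which is why the built-in defaults drop their trailing dots ("druid.storage." becomes "druid.storage").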