diff --git a/docs/content/configuration/index.md b/docs/content/configuration/index.md
index af1c5eb035a..4a259b1d550 100644
--- a/docs/content/configuration/index.md
+++ b/docs/content/configuration/index.md
@@ -23,9 +23,10 @@ Many of Druid's external dependencies can be plugged in as modules. Extensions c
 |--------|-----------|-------|
 |`druid.extensions.directory`|The root extension directory where user can put extensions related files. Druid will load extensions stored under this directory.|`extensions` (This is a relative path to Druid's working directory)|
 |`druid.extensions.hadoopDependenciesDir`|The root hadoop dependencies directory where user can put hadoop related dependencies files. Druid will load the dependencies based on the hadoop coordinate specified in the hadoop index task.|`hadoop-dependencies` (This is a relative path to Druid's working directory|
-|`druid.extensions.hadoopContainerDruidClasspath`|Hadoop Indexing launches hadoop jobs and this configuration provides way to explicitly set the user classpath for the hadoop job. By default this is computed automatically by druid based on the druid process classpath and set of extensions. However, sometimes you might want to be explicit to resolve dependency conflicts between druid and hadoop.|null|
 |`druid.extensions.loadList`|A JSON array of extensions to load from extension directories by Druid. If it is not specified, its value will be `null` and Druid will load all the extensions under `druid.extensions.directory`. If its value is empty list `[]`, then no extensions will be loaded at all. It is also allowed to specify absolute path of other custom extensions not stored in the common extensions directory.|null|
 |`druid.extensions.searchCurrentClassloader`|This is a boolean flag that determines if Druid will search the main classloader for extensions. It defaults to true but can be turned off if you have reason to not automatically add all modules on the classpath.|true|
+|`druid.extensions.hadoopContainerDruidClasspath`|Hadoop Indexing launches Hadoop jobs, and this configuration provides a way to explicitly set the user classpath for the Hadoop job. By default this is computed automatically by Druid based on the Druid process classpath and the set of loaded extensions. However, sometimes you might want to set it explicitly to resolve dependency conflicts between Druid and Hadoop.|null|
+|`druid.extensions.addExtensionsToHadoopContainer`|Only applicable if `druid.extensions.hadoopContainerDruidClasspath` is provided. If set to true, the extensions specified in `druid.extensions.loadList` are added to the Hadoop container classpath. Note that when `druid.extensions.hadoopContainerDruidClasspath` is not provided, extensions are always added to the Hadoop container classpath.|false|
 
 ### Zookeeper
 We recommend just setting the base ZK path and the ZK service host, but all ZK paths that Druid uses can be overwritten to absolute paths.
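For reviewers, a usage sketch (not part of this diff) of how the two properties interact in `common.runtime.properties`; the paths and extension names below are illustrative assumptions, not shipped defaults:

```
# Explicit classpath for the Hadoop container; Druid stops deriving it from the process classpath.
druid.extensions.hadoopContainerDruidClasspath=/etc/hadoop/conf:/opt/druid/lib/*
# With an explicit classpath, loadList extensions reach the container only when this flag is true;
# without an explicit classpath they are always included.
druid.extensions.addExtensionsToHadoopContainer=true
druid.extensions.loadList=["druid-hdfs-storage"]
```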
diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java
index 669241a1331..62a7bb06141 100644
--- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java
+++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopTask.java
@@ -25,7 +25,6 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.inject.Injector;
-
 import io.druid.guice.ExtensionsConfig;
 import io.druid.guice.GuiceInjectors;
 import io.druid.indexing.common.TaskToolbox;
@@ -142,11 +141,14 @@ public abstract class HadoopTask extends AbstractTask
         Arrays.asList(((URLClassLoader) HadoopIndexTask.class.getClassLoader()).getURLs())
     );
 
+    final List<URL> extensionURLs = Lists.newArrayList();
     for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) {
       final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension);
-      jobURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
+      extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
     }
 
+    jobURLs.addAll(extensionURLs);
+
     final List<URL> localClassLoaderURLs = new ArrayList<>(jobURLs);
 
     // hadoop dependencies come before druid classes because some extensions depend on them
@@ -169,11 +171,16 @@ public abstract class HadoopTask extends AbstractTask
       hadoopContainerDruidClasspathJars = Joiner.on(File.pathSeparator).join(jobURLs);
     }
     else {
-      hadoopContainerDruidClasspathJars =
-          Joiner.on(File.pathSeparator)
-                .join(
-                    Initialization.getURLsForClasspath(extensionsConfig.getHadoopContainerDruidClasspath())
-                );
+      List<URL> hadoopContainerURLs = Lists.newArrayList(
+          Initialization.getURLsForClasspath(extensionsConfig.getHadoopContainerDruidClasspath())
+      );
+
+      if (extensionsConfig.getAddExtensionsToHadoopContainer()) {
+        hadoopContainerURLs.addAll(extensionURLs);
+      }
+
+      hadoopContainerDruidClasspathJars = Joiner.on(File.pathSeparator)
+                                                .join(hadoopContainerURLs);
     }
 
     log.info("Hadoop Container Druid Classpath is set to [%s]", hadoopContainerDruidClasspathJars);
diff --git a/processing/src/main/java/io/druid/guice/ExtensionsConfig.java b/processing/src/main/java/io/druid/guice/ExtensionsConfig.java
index 3b6b595919c..e04e612a53d 100644
--- a/processing/src/main/java/io/druid/guice/ExtensionsConfig.java
+++ b/processing/src/main/java/io/druid/guice/ExtensionsConfig.java
@@ -41,6 +41,10 @@ public class ExtensionsConfig
   @JsonProperty
   private String hadoopContainerDruidClasspath = null;
 
+  // Only applicable when hadoopContainerDruidClasspath is explicitly specified.
+  @JsonProperty
+  private boolean addExtensionsToHadoopContainer = false;
+
   @JsonProperty
   private List<String> loadList;
 
@@ -64,6 +68,11 @@ public class ExtensionsConfig
     return hadoopContainerDruidClasspath;
   }
 
+  public boolean getAddExtensionsToHadoopContainer()
+  {
+    return addExtensionsToHadoopContainer;
+  }
+
   public List<String> getLoadList()
   {
     return loadList;
@@ -77,6 +86,7 @@ public class ExtensionsConfig
            ", directory='" + directory + '\'' +
            ", hadoopDependenciesDir='" + hadoopDependenciesDir + '\'' +
            ", hadoopContainerDruidClasspath='" + hadoopContainerDruidClasspath + '\'' +
+           ", addExtensionsToHadoopContainer=" + addExtensionsToHadoopContainer +
            ", loadList=" + loadList +
            '}';
   }
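As a review aid (not part of the patch), a minimal standalone Java sketch of the classpath decision the new flag introduces in `HadoopTask`. The method and variable names are hypothetical, classpath entries are modeled as plain strings, and the wildcard expansion performed by `Initialization.getURLsForClasspath` is deliberately omitted:

```java
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class HadoopContainerClasspathSketch
{
  // Mirrors the patched logic: with no explicit classpath, the container gets the process
  // classpath plus all loaded extensions; with an explicit classpath, extensions are appended
  // only when addExtensionsToHadoopContainer is true.
  static String buildHadoopContainerClasspath(
      String explicitClasspath,               // druid.extensions.hadoopContainerDruidClasspath
      boolean addExtensionsToHadoopContainer, // druid.extensions.addExtensionsToHadoopContainer
      List<String> processClasspathEntries,   // entries from the Druid process classloader
      List<String> extensionEntries           // entries contributed by loaded extensions
  )
  {
    final List<String> entries = new ArrayList<>();
    if (explicitClasspath == null) {
      entries.addAll(processClasspathEntries);
      entries.addAll(extensionEntries);
    }
    else {
      entries.add(explicitClasspath);
      if (addExtensionsToHadoopContainer) {
        entries.addAll(extensionEntries);
      }
    }
    return String.join(File.pathSeparator, entries);
  }

  public static void main(String[] args)
  {
    // Illustrative paths only.
    System.out.println(
        buildHadoopContainerClasspath(
            "/etc/hadoop/conf",
            true,
            Arrays.asList("/opt/druid/lib/druid-services.jar"),
            Arrays.asList("/opt/druid/extensions/druid-hdfs-storage/druid-hdfs-storage.jar")
        )
    );
  }
}
```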