Extensions: Option to load classes from extension jars first. (#5321)

The behavior is configurable through druid.extensions.useExtensionClassloaderFirst.
It is useful when an extension needs to load a dependency that differs from the one provided
by Druid, for example a different version of geoip or protobuf.
Gian Merlino authored 2018-02-06 02:44:03 -08:00, committed by Nishant Bangarwa
parent 0db696b7c9, commit 9a62b02cb7
8 changed files with 156 additions and 22 deletions


@@ -25,6 +25,7 @@ Many of Druid's external dependencies can be plugged in as modules. Extensions c
 |`druid.extensions.hadoopDependenciesDir`|The root hadoop dependencies directory where user can put hadoop related dependencies files. Druid will load the dependencies based on the hadoop coordinate specified in the hadoop index task.|`hadoop-dependencies` (This is a relative path to Druid's working directory)|
 |`druid.extensions.loadList`|A JSON array of extensions to load from extension directories by Druid. If it is not specified, its value will be `null` and Druid will load all the extensions under `druid.extensions.directory`. If its value is empty list `[]`, then no extensions will be loaded at all. It is also allowed to specify absolute path of other custom extensions not stored in the common extensions directory.|null|
 |`druid.extensions.searchCurrentClassloader`|This is a boolean flag that determines if Druid will search the main classloader for extensions. It defaults to true but can be turned off if you have reason to not automatically add all modules on the classpath.|true|
+|`druid.extensions.useExtensionClassloaderFirst`|This is a boolean flag that determines if Druid extensions should prefer loading classes from their own jars rather than jars bundled with Druid. If false, extensions must be compatible with classes provided by any jars bundled with Druid. If true, extensions may depend on conflicting versions.|false|
 |`druid.extensions.hadoopContainerDruidClasspath`|Hadoop Indexing launches hadoop jobs and this configuration provides a way to explicitly set the user classpath for the hadoop job. By default this is computed automatically by druid based on the druid process classpath and set of extensions. However, sometimes you might want to be explicit to resolve dependency conflicts between druid and hadoop.|null|
 |`druid.extensions.addExtensionsToHadoopContainer`|Only applicable if `druid.extensions.hadoopContainerDruidClasspath` is provided. If set to true, then extensions specified in the loadList are added to hadoop container classpath. Note that when `druid.extensions.hadoopContainerDruidClasspath` is not provided then extensions are always added to hadoop container classpath.|false|
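As an illustration of how an operator would opt in, the new flag rides alongside the existing druid.extensions.* properties in the common runtime configuration. A minimal, hypothetical snippet (the extension name is only an example); leaving the flag at its default of false keeps the old parent-first behavior:

  druid.extensions.loadList=["my-extension"]
  druid.extensions.useExtensionClassloaderFirst=true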


@@ -60,6 +60,16 @@
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
       <version>${apache.kafka.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-api</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>net.jpountz.lz4</groupId>
+          <artifactId>lz4</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>

     <!-- Tests -->


@@ -147,7 +147,7 @@ public abstract class HadoopTask extends AbstractTask
     final List<URL> extensionURLs = Lists.newArrayList();
     for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) {
-      final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension);
+      final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
       extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
     }
@@ -161,7 +161,7 @@ public abstract class HadoopTask extends AbstractTask
         finalHadoopDependencyCoordinates,
         extensionsConfig
     )) {
-      final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency);
+      final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false);
       localClassLoaderURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs()));
     }


@@ -35,6 +35,9 @@ public class ExtensionsConfig
   @JsonProperty
   private String directory = "extensions";

+  @JsonProperty
+  private boolean useExtensionClassloaderFirst = false;
+
   @JsonProperty
   private String hadoopDependenciesDir = "hadoop-dependencies";
@@ -58,6 +61,11 @@ public class ExtensionsConfig
     return directory;
   }

+  public boolean isUseExtensionClassloaderFirst()
+  {
+    return useExtensionClassloaderFirst;
+  }
+
   public String getHadoopDependenciesDir()
   {
     return hadoopDependenciesDir;
@@ -84,6 +92,7 @@ public class ExtensionsConfig
     return "ExtensionsConfig{" +
            "searchCurrentClassloader=" + searchCurrentClassloader +
            ", directory='" + directory + '\'' +
+           ", useExtensionClassloaderFirst=" + useExtensionClassloaderFirst +
            ", hadoopDependenciesDir='" + hadoopDependenciesDir + '\'' +
            ", hadoopContainerDruidClasspath='" + hadoopContainerDruidClasspath + '\'' +
            ", addExtensionsToHadoopContainer=" + addExtensionsToHadoopContainer +
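Since the new flag is a plain Jackson-mapped field, it can be exercised directly. A minimal sketch, assuming the class's implicit default constructor and the field-level @JsonProperty bindings shown above (the io.druid.guice package is taken from this era of the codebase; in a real deployment the value comes from the druid.extensions.* runtime properties rather than a JSON string):

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.guice.ExtensionsConfig;

public class ExtensionsConfigSketch
{
  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new ObjectMapper();
    // Stand-in for the property binding Druid performs at startup.
    final ExtensionsConfig config = mapper.readValue(
        "{\"useExtensionClassloaderFirst\": true}",
        ExtensionsConfig.class
    );
    // The new getter is what Initialization consults when building extension classloaders.
    System.out.println(config.isUseExtensionClassloaderFirst()); // prints "true"
  }
}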


@@ -0,0 +1,96 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.initialization;

import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;

import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;

/**
 * The ClassLoader that gets used when druid.extensions.useExtensionClassloaderFirst = true.
 */
public class ExtensionFirstClassLoader extends URLClassLoader
{
  private final ClassLoader druidLoader;

  public ExtensionFirstClassLoader(final URL[] urls, final ClassLoader druidLoader)
  {
    super(urls, null);
    this.druidLoader = Preconditions.checkNotNull(druidLoader, "druidLoader");
  }

  @Override
  public Class<?> loadClass(final String name) throws ClassNotFoundException
  {
    return loadClass(name, false);
  }

  @Override
  protected Class<?> loadClass(final String name, final boolean resolve) throws ClassNotFoundException
  {
    synchronized (getClassLoadingLock(name)) {
      Class<?> clazz = findLoadedClass(name);

      if (clazz == null) {
        // Try extension classloader first.
        try {
          clazz = findClass(name);
        }
        catch (ClassNotFoundException e) {
          // Try the Druid classloader. Will throw ClassNotFoundException if the class can't be loaded.
          return druidLoader.loadClass(name);
        }
      }

      if (resolve) {
        resolveClass(clazz);
      }

      return clazz;
    }
  }

  @Override
  public URL getResource(final String name)
  {
    final URL resourceFromExtension = super.getResource(name);
    if (resourceFromExtension != null) {
      return resourceFromExtension;
    } else {
      return druidLoader.getResource(name);
    }
  }

  @Override
  public Enumeration<URL> getResources(final String name) throws IOException
  {
    final List<URL> urls = new ArrayList<>();
    Iterators.addAll(urls, Iterators.forEnumeration(super.getResources(name)));
    Iterators.addAll(urls, Iterators.forEnumeration(druidLoader.getResources(name)));
    return Iterators.asEnumeration(urls.iterator());
  }
}
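To make the ordering concrete, here is a small, hypothetical sketch contrasting the new loader with the parent-first URLClassLoader that extensions got before this patch. It assumes an extension directory bundling its own protobuf jar while Druid's classloader also carries a protobuf class; the jar path and class name are illustrative only:

import io.druid.initialization.ExtensionFirstClassLoader;

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

public class ClassloaderOrderSketch
{
  public static void main(String[] args) throws Exception
  {
    // Hypothetical jar shipped inside an extension directory.
    final URL[] extensionJars = new URL[]{
        new File("extensions/my-extension/protobuf-java-3.1.0.jar").toURI().toURL()
    };
    final ClassLoader druidLoader = ClassloaderOrderSketch.class.getClassLoader();

    // Pre-existing behavior: parent-first, so Druid's copy of a shared class wins.
    final ClassLoader parentFirst = new URLClassLoader(extensionJars, druidLoader);

    // New behavior when useExtensionClassloaderFirst = true: the extension's own jars are
    // consulted first, and druidLoader is only a fallback for classes the extension lacks.
    final ClassLoader extensionFirst = new ExtensionFirstClassLoader(extensionJars, druidLoader);

    System.out.println(parentFirst.loadClass("com.google.protobuf.Message").getProtectionDomain().getCodeSource());
    System.out.println(extensionFirst.loadClass("com.google.protobuf.Message").getProtectionDomain().getCodeSource());
  }
}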


@@ -176,7 +176,10 @@ public class Initialization
     for (File extension : getExtensionFilesToLoad(extensionsConfig)) {
       log.info("Loading extension [%s] for class [%s]", extension.getName(), serviceClass);
       try {
-        final URLClassLoader loader = getClassLoaderForExtension(extension);
+        final URLClassLoader loader = getClassLoaderForExtension(
+            extension,
+            extensionsConfig.isUseExtensionClassloaderFirst()
+        );
         ServiceLoader.load(serviceClass, loader).forEach(impl -> tryAdd(impl, "local file system"));
       }
       catch (Exception e) {
@@ -287,25 +290,40 @@ public class Initialization
    * @param extension The File instance of the extension we want to load
    *
    * @return a URLClassLoader that loads all the jars on which the extension is dependent
-   *
-   * @throws MalformedURLException
    */
-  public static URLClassLoader getClassLoaderForExtension(File extension) throws MalformedURLException
+  public static URLClassLoader getClassLoaderForExtension(File extension, boolean useExtensionClassloaderFirst)
   {
+    return loadersMap.computeIfAbsent(
+        extension,
+        theExtension -> makeClassLoaderForExtension(theExtension, useExtensionClassloaderFirst)
+    );
+  }
+
+  private static URLClassLoader makeClassLoaderForExtension(
+      final File extension,
+      final boolean useExtensionClassloaderFirst
+  )
+  {
-    URLClassLoader loader = loadersMap.get(extension);
-    if (loader == null) {
     final Collection<File> jars = FileUtils.listFiles(extension, new String[]{"jar"}, false);
     final URL[] urls = new URL[jars.size()];
+    try {
       int i = 0;
       for (File jar : jars) {
         final URL url = jar.toURI().toURL();
-        log.info("added URL[%s]", url);
+        log.info("added URL[%s] for extension[%s]", url, extension.getName());
         urls[i++] = url;
       }
-      loadersMap.putIfAbsent(extension, new URLClassLoader(urls, Initialization.class.getClassLoader()));
-      loader = loadersMap.get(extension);
-    }
-    return loader;
+    }
+    catch (MalformedURLException e) {
+      throw new RuntimeException(e);
+    }
+
+    if (useExtensionClassloaderFirst) {
+      return new ExtensionFirstClassLoader(urls, Initialization.class.getClassLoader());
+    } else {
+      return new URLClassLoader(urls, Initialization.class.getClassLoader());
+    }
   }

   public static List<URL> getURLsForClasspath(String cp)
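For reference, a sketch of how a caller might exercise the changed entry point after this patch; the extension directory is hypothetical, and the module discovery mirrors what Initialization itself does in the hunk above:

import io.druid.initialization.DruidModule;
import io.druid.initialization.Initialization;

import java.io.File;
import java.net.URLClassLoader;
import java.util.ServiceLoader;

public class LoadExtensionSketch
{
  public static void main(String[] args)
  {
    // Hypothetical on-disk extension directory containing that extension's jars.
    final File extensionDir = new File("extensions/my-extension");

    // true -> ExtensionFirstClassLoader; false -> the old parent-first URLClassLoader.
    // Either way the loader is cached per extension directory in loadersMap.
    final URLClassLoader loader = Initialization.getClassLoaderForExtension(extensionDir, true);

    // Same ServiceLoader-based discovery used for DruidModule implementations above.
    for (DruidModule module : ServiceLoader.load(DruidModule.class, loader)) {
      System.out.println("found module: " + module.getClass().getName());
    }
  }
}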


@@ -160,7 +160,7 @@ public class InitializationTest
     a_jar.createNewFile();
     b_jar.createNewFile();
     c_jar.createNewFile();
-    final URLClassLoader loader = Initialization.getClassLoaderForExtension(some_extension_dir);
+    final URLClassLoader loader = Initialization.getClassLoaderForExtension(some_extension_dir, false);
     final URL[] expectedURLs = new URL[]{a_jar.toURI().toURL(), b_jar.toURI().toURL(), c_jar.toURI().toURL()};
     final URL[] actualURLs = loader.getURLs();
     Arrays.sort(
@@ -451,8 +451,8 @@
     Assert.assertTrue(jar1.createNewFile());
     Assert.assertTrue(jar2.createNewFile());
-    final ClassLoader classLoader1 = Initialization.getClassLoaderForExtension(extension1);
-    final ClassLoader classLoader2 = Initialization.getClassLoaderForExtension(extension2);
+    final ClassLoader classLoader1 = Initialization.getClassLoaderForExtension(extension1, false);
+    final ClassLoader classLoader2 = Initialization.getClassLoaderForExtension(extension2, false);
     Assert.assertArrayEquals(new URL[]{jar1.toURL()}, ((URLClassLoader) classLoader1).getURLs());
     Assert.assertArrayEquals(new URL[]{jar2.toURL()}, ((URLClassLoader) classLoader2).getURLs());


@@ -81,7 +81,7 @@ public class CliHadoopIndexer implements Runnable
     final List<URL> extensionURLs = Lists.newArrayList();
     for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) {
-      final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension);
+      final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
       extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
     }
@@ -92,7 +92,7 @@
     driverURLs.addAll(nonHadoopURLs);
     // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
     for (File hadoopDependency : Initialization.getHadoopDependencyFilesToLoad(allCoordinates, extensionsConfig)) {
-      final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency);
+      final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false);
       driverURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs()));
     }