Exclude unneeded hadoop transitive dependencies (#8962)

* Exclude unneeded hadoop transitive dependencies

These dependencies are provided by core:
- com.squareup.okhttp:okhttp
- commons-beanutils:commons-beanutils
- org.apache.commons:commons-compress
- org.apache.zookeeper:zookeeper

These dependencies are not needed and are excluded because they contain
security vulnerabilities:
- commons-beanutils:commons-beanutils-core
- org.codehaus.jackson:jackson-mapper-asl

* Simplify exclusions + separate unneeded/vulnerable

* Do not exclude jackson-mapper-asl
Chi Cao Minh, 2019-12-02 16:08:21 -08:00; committed by Jonathan Wei
parent 6997b167b1
commit 4b7e79a4e6
5 changed files with 277 additions and 130 deletions
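
For reference, the enforcement in pull-deps (as opposed to the pom-level exclusion entries) is done by the new PullDependencies.Dependencies helper introduced in the Java diff below. The following is a minimal sketch of its matching semantics; ExclusionMatchingSketch is a hypothetical scratch class that assumes same-package access (the helper and its contain() method are package-private), and the artifact versions are illustrative only:

    package org.apache.druid.cli;

    import org.eclipse.aether.artifact.DefaultArtifact;

    // Hypothetical scratch class, for illustration only.
    public class ExclusionMatchingSketch
    {
      public static void main(String[] args)
      {
        PullDependencies.Dependencies exclusions =
            PullDependencies.Dependencies.builder()
                                         .put("commons-beanutils", "commons-beanutils-core") // exact artifact
                                         .put("org.apache.zookeeper")                        // whole group ("*")
                                         .build();

        // true: groupId and artifactId both match an exact entry
        System.out.println(exclusions.contain(
            new DefaultArtifact("commons-beanutils:commons-beanutils-core:1.8.0")));
        // true: the wildcard entry matches any artifact in the group
        System.out.println(exclusions.contain(
            new DefaultArtifact("org.apache.zookeeper:zookeeper:3.4.14")));
        // false: same group, different artifact, and no wildcard for this group
        System.out.println(exclusions.contain(
            new DefaultArtifact("commons-beanutils:commons-beanutils:1.9.4")));
      }
    }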

View File

@@ -227,6 +227,11 @@
           <groupId>com.nimbusds</groupId>
           <artifactId>nimbus-jose-jwt</artifactId>
         </exclusion>
+        <!-- Following are excluded to remove security vulnerabilities: -->
+        <exclusion>
+          <groupId>commons-beanutils</groupId>
+          <artifactId>commons-beanutils-core</artifactId>
+        </exclusion>
         <exclusion>
           <!-- excluded to remove security vulnerabilities; jackson-mapper-asl is renamed to jackson-databind -->
           <groupId>org.codehaus.jackson</groupId>

View File

@@ -137,6 +137,11 @@
           <groupId>com.google.guava</groupId>
           <artifactId>guava</artifactId>
         </exclusion>
+        <!-- Following are excluded to remove security vulnerabilities: -->
+        <exclusion>
+          <groupId>commons-beanutils</groupId>
+          <artifactId>commons-beanutils-core</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -245,6 +250,11 @@
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-server</artifactId>
         </exclusion>
+        <!-- Following are excluded to remove security vulnerabilities: -->
+        <exclusion>
+          <groupId>commons-beanutils</groupId>
+          <artifactId>commons-beanutils-core</artifactId>
+        </exclusion>
         <exclusion>
           <!-- excluded to remove security vulnerabilities; jackson-mapper-asl is renamed to jackson-databind -->
           <groupId>org.codehaus.jackson</groupId>

View File

@@ -166,6 +166,11 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-core</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>

View File

@@ -19,8 +19,11 @@
 package org.apache.druid.cli;

+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.SetMultimap;
 import com.google.inject.Inject;
 import io.airlift.airline.Command;
 import io.airlift.airline.Option;
@@ -40,7 +43,6 @@ import org.eclipse.aether.artifact.Artifact;
 import org.eclipse.aether.artifact.DefaultArtifact;
 import org.eclipse.aether.collection.CollectRequest;
 import org.eclipse.aether.graph.Dependency;
-import org.eclipse.aether.graph.DependencyFilter;
 import org.eclipse.aether.graph.DependencyNode;
 import org.eclipse.aether.repository.Authentication;
 import org.eclipse.aether.repository.Proxy;
@@ -58,7 +60,6 @@ import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -72,9 +73,18 @@ public class PullDependencies implements Runnable
 {
   private static final Logger log = new Logger(PullDependencies.class);

-  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
-  private static final Set<String> EXCLUSIONS = new HashSet<>(
-      /*
+  private static final List<String> DEFAULT_REMOTE_REPOSITORIES = ImmutableList.of(
+      "https://repo1.maven.org/maven2/"
+  );
+
+  private static final Dependencies PROVIDED_BY_CORE_DEPENDENCIES =
+      Dependencies.builder()
+                  .put("com.squareup.okhttp", "okhttp")
+                  .put("commons-beanutils", "commons-beanutils")
+                  .put("org.apache.commons", "commons-compress")
+                  .put("org.apache.zookeeper", "zookeeper")
+                  .build();
+
+  /*
   // It is possible that extensions will pull down a lot of jars that are either
   // duplicates OR conflict with druid jars. In that case, there are two problems that arise
@@ -107,7 +117,7 @@ public class PullDependencies implements Runnable
   // Here is a list of dependencies extensions should probably exclude.
   //
   // Conflicts can be discovered using the following command on the distribution tarball:
-  //    `find lib -iname *.jar | cut -d / -f 2 | sed -e 's/-[0-9]\.[0-9]/@/' | cut -f 1 -d @ | sort | uniq | xargs -I {} find extensions -name "*{}*.jar" | sort`
+  //    `find lib -iname "*.jar" | cut -d / -f 2 | sed -e 's/-[0-9]\.[0-9]/@/' | cut -f 1 -d @ | sort | uniq | xargs -I {} find extensions -name "*{}*.jar" | sort`

   "org.apache.druid",
   "com.metamx.druid",
@@ -141,11 +151,13 @@ public class PullDependencies implements Runnable
   "org.roaringbitmap",
   "net.java.dev.jets3t"
   */
-  );
-
-  private static final List<String> DEFAULT_REMOTE_REPOSITORIES = ImmutableList.of(
-      "https://repo1.maven.org/maven2/"
-  );
+
+  private static final Dependencies SECURITY_VULNERABILITY_EXCLUSIONS =
+      Dependencies.builder()
+                  .put("commons-beanutils", "commons-beanutils-core")
+                  .build();
+
+  private final Dependencies hadoopExclusions;

   private TeslaAether aether;
@@ -155,107 +167,104 @@ public class PullDependencies implements Runnable
   @Option(
       name = {"-c", "--coordinate"},
       title = "coordinate",
-      description = "Extension coordinate to pull down, followed by a maven coordinate, e.g. org.apache.druid.extensions:mysql-metadata-storage",
-      required = false)
+      description = "Extension coordinate to pull down, followed by a maven coordinate, e.g. org.apache.druid.extensions:mysql-metadata-storage"
+  )
   public List<String> coordinates = new ArrayList<>();

   @Option(
       name = {"-h", "--hadoop-coordinate"},
       title = "hadoop coordinate",
-      description = "Hadoop dependency to pull down, followed by a maven coordinate, e.g. org.apache.hadoop:hadoop-client:2.4.0",
-      required = false)
+      description = "Hadoop dependency to pull down, followed by a maven coordinate, e.g. org.apache.hadoop:hadoop-client:2.4.0"
+  )
   public List<String> hadoopCoordinates = new ArrayList<>();

   @Option(
       name = "--no-default-hadoop",
-      description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.5. If `-h` option is supplied, then default hadoop coordinate will not be downloaded.",
-      required = false)
+      description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.5. If `-h` option is supplied, then default hadoop coordinate will not be downloaded."
+  )
   public boolean noDefaultHadoop = false;

   @Option(
       name = "--clean",
-      title = "Remove exisiting extension and hadoop dependencies directories before pulling down dependencies.",
-      required = false)
+      title = "Remove exisiting extension and hadoop dependencies directories before pulling down dependencies."
+  )
   public boolean clean = false;

   @Option(
       name = {"-l", "--localRepository"},
-      title = "A local repository that Maven will use to put downloaded files. Then pull-deps will lay these files out into the extensions directory as needed.",
-      required = false
+      title = "A local repository that Maven will use to put downloaded files. Then pull-deps will lay these files out into the extensions directory as needed."
   )
   public String localRepository = StringUtils.format("%s/%s", System.getProperty("user.home"), ".m2/repository");

   @Option(
       name = {"-r", "--remoteRepository"},
-      title = "Add a remote repository. Unless --no-default-remote-repositories is provided, these will be used after https://repo1.maven.org/maven2/",
-      required = false
+      title = "Add a remote repository. Unless --no-default-remote-repositories is provided, these will be used after https://repo1.maven.org/maven2/"
   )
   List<String> remoteRepositories = new ArrayList<>();

   @Option(
       name = "--no-default-remote-repositories",
-      description = "Don't use the default remote repositories, only use the repositories provided directly via --remoteRepository",
-      required = false)
+      description = "Don't use the default remote repositories, only use the repositories provided directly via --remoteRepository"
+  )
   public boolean noDefaultRemoteRepositories = false;

   @Option(
       name = {"-d", "--defaultVersion"},
-      title = "Version to use for extension artifacts without version information.",
-      required = false
+      title = "Version to use for extension artifacts without version information."
   )
   public String defaultVersion = PullDependencies.class.getPackage().getImplementationVersion();

   @Option(
       name = {"--use-proxy"},
-      title = "Use http/https proxy to pull dependencies.",
-      required = false
+      title = "Use http/https proxy to pull dependencies."
   )
   public boolean useProxy = false;

   @Option(
       name = {"--proxy-type"},
-      title = "The proxy type, should be either http or https",
-      required = false
+      title = "The proxy type, should be either http or https"
   )
   public String proxyType = "https";

   @Option(
       name = {"--proxy-host"},
-      title = "The proxy host",
-      required = false
+      title = "The proxy host"
   )
   public String proxyHost = "";

   @Option(
       name = {"--proxy-port"},
-      title = "The proxy port",
-      required = false
+      title = "The proxy port"
   )
   public int proxyPort = -1;

   @Option(
       name = {"--proxy-username"},
-      title = "The proxy username",
-      required = false
+      title = "The proxy username"
   )
   public String proxyUsername = "";

   @Option(
       name = {"--proxy-password"},
-      title = "The proxy password",
-      required = false
+      title = "The proxy password"
   )
   public String proxyPassword = "";

+  @SuppressWarnings("unused") // used by io.airlift:airline
   public PullDependencies()
   {
+    hadoopExclusions = Dependencies.builder()
+                                   .putAll(PROVIDED_BY_CORE_DEPENDENCIES)
+                                   .putAll(SECURITY_VULNERABILITY_EXCLUSIONS)
+                                   .build();
   }

   // Used for testing only
-  PullDependencies(TeslaAether aether, ExtensionsConfig extensionsConfig)
+  PullDependencies(TeslaAether aether, ExtensionsConfig extensionsConfig, Dependencies hadoopExclusions)
   {
     this.aether = aether;
     this.extensionsConfig = extensionsConfig;
+    this.hadoopExclusions = hadoopExclusions;
   }

   @Override
@@ -315,7 +324,7 @@ public class PullDependencies implements Runnable
       currExtensionDir = new File(currExtensionDir, versionedArtifact.getVersion());
       createExtensionDirectory(hadoopCoordinate, currExtensionDir);

-      downloadExtension(versionedArtifact, currExtensionDir);
+      downloadExtension(versionedArtifact, currExtensionDir, hadoopExclusions);
     }
     log.info("Finish downloading dependencies for hadoop extension coordinates: [%s]", hadoopCoordinates);
   }
@@ -349,6 +358,11 @@ public class PullDependencies implements Runnable
    * @param toLocation The location where this extension will be downloaded to
    */
   private void downloadExtension(Artifact versionedArtifact, File toLocation)
+  {
+    downloadExtension(versionedArtifact, toLocation, PROVIDED_BY_CORE_DEPENDENCIES);
+  }
+
+  private void downloadExtension(Artifact versionedArtifact, File toLocation, Dependencies exclusions)
   {
     final CollectRequest collectRequest = new CollectRequest();
     collectRequest.setRoot(new Dependency(versionedArtifact, JavaScopes.RUNTIME));
@@ -356,41 +370,31 @@ public class PullDependencies implements Runnable
         collectRequest,
         DependencyFilterUtils.andFilter(
             DependencyFilterUtils.classpathFilter(JavaScopes.RUNTIME),
-            new DependencyFilter()
-            {
-              @Override
-              public boolean accept(DependencyNode node, List<DependencyNode> parents)
-              {
-                String scope = node.getDependency().getScope();
-                if (scope != null) {
-                  scope = StringUtils.toLowerCase(scope);
-                  if ("provided".equals(scope)) {
-                    return false;
-                  }
-                  if ("test".equals(scope)) {
-                    return false;
-                  }
-                  if ("system".equals(scope)) {
-                    return false;
-                  }
-                }
-                if (accept(node.getArtifact())) {
-                  return false;
-                }
-
-                for (DependencyNode parent : parents) {
-                  if (accept(parent.getArtifact())) {
-                    return false;
-                  }
-                }
-
-                return true;
-              }
-
-              private boolean accept(final Artifact artifact)
-              {
-                return EXCLUSIONS.contains(artifact.getGroupId());
-              }
+            (node, parents) -> {
+              String scope = node.getDependency().getScope();
+              if (scope != null) {
+                scope = StringUtils.toLowerCase(scope);
+                if ("provided".equals(scope)) {
+                  return false;
+                }
+                if ("test".equals(scope)) {
+                  return false;
+                }
+                if ("system".equals(scope)) {
+                  return false;
+                }
+              }
+              if (exclusions.contain(node.getArtifact())) {
+                return false;
+              }
+              for (DependencyNode parent : parents) {
+                if (exclusions.contain(parent.getArtifact())) {
+                  return false;
+                }
+              }
+              return true;
             }
         )
     );
@@ -400,11 +404,11 @@ public class PullDependencies implements Runnable
     final List<Artifact> artifacts = aether.resolveArtifacts(dependencyRequest);

     for (Artifact artifact : artifacts) {
-      if (!EXCLUSIONS.contains(artifact.getGroupId())) {
+      if (exclusions.contain(artifact)) {
+        log.debug("Skipped Artifact[%s]", artifact);
+      } else {
         log.info("Adding file [%s] at [%s]", artifact.getFile().getName(), toLocation.getAbsolutePath());
         org.apache.commons.io.FileUtils.copyFileToDirectory(artifact.getFile(), toLocation);
-      } else {
-        log.debug("Skipped Artifact[%s]", artifact);
       }
     }
   }
@@ -514,15 +518,19 @@ public class PullDependencies implements Runnable
       );
     }

-    if (!StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTP) && !StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTPS)) {
+    if (!StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTP) &&
+        !StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTPS)) {
       throw new IllegalArgumentException("invalid proxy type: " + proxyType);
     }

-    RepositorySystemSession repositorySystemSession = new RepositorySystemSessionProvider(new File(localRepository)).get();
+    RepositorySystemSession repositorySystemSession =
+        new RepositorySystemSessionProvider(new File(localRepository)).get();
     List<RemoteRepository> rl = remoteRepositories.stream().map(r -> {
       RemoteRepository.Builder builder = new RemoteRepository.Builder(r.getId(), "default", r.getUrl());
       if (r.getUsername() != null && r.getPassword() != null) {
-        Authentication auth = new AuthenticationBuilder().addUsername(r.getUsername()).addPassword(r.getPassword()).build();
+        Authentication auth = new AuthenticationBuilder().addUsername(r.getUsername())
+                                                         .addPassword(r.getPassword())
+                                                         .build();
         builder.setAuthentication(auth);
       }
@@ -557,4 +565,60 @@ public class PullDependencies implements Runnable
       );
     }
   }
+
+  @VisibleForTesting
+  static class Dependencies
+  {
+    private static final String ANY_ARTIFACT_ID = "*";
+
+    private final SetMultimap<String, String> groupIdToArtifactIds;
+
+    private Dependencies(Builder builder)
+    {
+      groupIdToArtifactIds = builder.groupIdToArtifactIdsBuilder.build();
+    }
+
+    boolean contain(Artifact artifact)
+    {
+      Set<String> artifactIds = groupIdToArtifactIds.get(artifact.getGroupId());
+      return artifactIds.contains(ANY_ARTIFACT_ID) || artifactIds.contains(artifact.getArtifactId());
+    }
+
+    static Builder builder()
+    {
+      return new Builder();
+    }
+
+    static final class Builder
+    {
+      private final ImmutableSetMultimap.Builder<String, String> groupIdToArtifactIdsBuilder =
+          ImmutableSetMultimap.builder();
+
+      private Builder()
+      {
+      }
+
+      Builder putAll(Dependencies dependencies)
+      {
+        groupIdToArtifactIdsBuilder.putAll(dependencies.groupIdToArtifactIds);
+        return this;
+      }
+
+      Builder put(String groupId)
+      {
+        return put(groupId, ANY_ARTIFACT_ID);
+      }
+
+      Builder put(String groupId, String artifactId)
+      {
+        groupIdToArtifactIdsBuilder.put(groupId, artifactId);
+        return this;
+      }
+
+      Dependencies build()
+      {
+        return new Dependencies(this);
+      }
+    }
+  }
 }
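
Worth noting about the constructor wiring above: the effective hadoop exclusion set is the union of PROVIDED_BY_CORE_DEPENDENCIES and SECURITY_VULNERABILITY_EXCLUSIONS, because putAll() merges the underlying multimaps; one groupId can therefore carry several excluded artifactIds. A minimal sketch of that composition follows, under the same same-package assumption as before (HadoopExclusionsSketch is a hypothetical scratch class; versions are illustrative):

    package org.apache.druid.cli;

    import org.eclipse.aether.artifact.DefaultArtifact;

    // Hypothetical scratch class mirroring the default-constructor wiring above.
    public class HadoopExclusionsSketch
    {
      public static void main(String[] args)
      {
        PullDependencies.Dependencies providedByCore =
            PullDependencies.Dependencies.builder()
                                         .put("commons-beanutils", "commons-beanutils")
                                         .build();
        PullDependencies.Dependencies vulnerable =
            PullDependencies.Dependencies.builder()
                                         .put("commons-beanutils", "commons-beanutils-core")
                                         .build();

        // putAll() merges entries, so the "commons-beanutils" group now maps to both artifactIds.
        PullDependencies.Dependencies hadoopExclusions =
            PullDependencies.Dependencies.builder()
                                         .putAll(providedByCore)
                                         .putAll(vulnerable)
                                         .build();

        // Both print true: hadoop downloads skip each of these artifacts.
        System.out.println(hadoopExclusions.contain(
            new DefaultArtifact("commons-beanutils:commons-beanutils:1.9.4")));
        System.out.println(hadoopExclusions.contain(
            new DefaultArtifact("commons-beanutils:commons-beanutils-core:1.8.0")));
      }
    }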

View File

@@ -20,12 +20,18 @@
 package org.apache.druid.cli;

 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 import io.tesla.aether.internal.DefaultTeslaAether;
 import org.apache.druid.guice.ExtensionsConfig;
 import org.apache.druid.java.util.common.StringUtils;
 import org.eclipse.aether.artifact.Artifact;
 import org.eclipse.aether.artifact.DefaultArtifact;
+import org.eclipse.aether.graph.DefaultDependencyNode;
+import org.eclipse.aether.graph.Dependency;
+import org.eclipse.aether.graph.DependencyFilter;
+import org.eclipse.aether.graph.DependencyNode;
 import org.eclipse.aether.resolution.DependencyRequest;
+import org.hamcrest.CoreMatchers;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -34,21 +40,38 @@ import org.junit.rules.TemporaryFolder;

 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;

-/**
- */
 public class PullDependenciesTest
 {
   private static final String EXTENSION_A_COORDINATE = "groupX:extension_A:123";
   private static final String EXTENSION_B_COORDINATE = "groupY:extension_B:456";
   private static final String HADOOP_CLIENT_2_3_0_COORDINATE = "org.apache.hadoop:hadoop-client:2.3.0";
   private static final String HADOOP_CLIENT_2_4_0_COORDINATE = "org.apache.hadoop:hadoop-client:2.4.0";

+  private static final String DEPENDENCY_GROUPID = "groupid";
+  private static final String HADOOP_CLIENT_VULNERABLE_ARTIFACTID1 = "vulnerable1";
+  private static final String HADOOP_CLIENT_VULNERABLE_ARTIFACTID2 = "vulnerable2";
+  private static final Set<String> HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS = ImmutableSet.of(
+      HADOOP_CLIENT_VULNERABLE_ARTIFACTID1,
+      HADOOP_CLIENT_VULNERABLE_ARTIFACTID2
+  );
+  private static final String HADOOP_CLIENT_VULNERABLE_JAR1 = HADOOP_CLIENT_VULNERABLE_ARTIFACTID1 + ".jar";
+  private static final String HADOOP_CLIENT_VULNERABLE_JAR2 = HADOOP_CLIENT_VULNERABLE_ARTIFACTID2 + ".jar";
+  private static final PullDependencies.Dependencies HADOOP_EXCLUSIONS =
+      PullDependencies.Dependencies.builder()
+                                   .put(DEPENDENCY_GROUPID, HADOOP_CLIENT_VULNERABLE_ARTIFACTID1)
+                                   .put(DEPENDENCY_GROUPID, HADOOP_CLIENT_VULNERABLE_ARTIFACTID2)
+                                   .build();
+
   @Rule
   public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@@ -63,21 +86,24 @@ public class PullDependenciesTest
   private File rootExtensionsDir;
   private File rootHadoopDependenciesDir;

-  private HashMap<Artifact, List<String>> extensionToJars; // map Artifact to its associated jars' names
+  private Map<Artifact, List<String>> extensionToDependency;

   @Before
   public void setUp() throws Exception
   {
-    localRepo = temporaryFolder.newFolder();
-    extensionToJars = new HashMap<>();
+    localRepo = temporaryFolder.newFolder("local_repo");
+    extensionToDependency = new HashMap<>();

-    extensionToJars.put(extension_A, ImmutableList.of("a.jar", "b.jar", "c.jar"));
-    extensionToJars.put(extension_B, ImmutableList.of("d.jar", "e.jar"));
-    extensionToJars.put(hadoop_client_2_3_0, ImmutableList.of("f.jar", "g.jar"));
-    extensionToJars.put(hadoop_client_2_4_0, ImmutableList.of("h.jar", "i.jar"));
+    extensionToDependency.put(extension_A, ImmutableList.of("a", "b", "c"));
+    extensionToDependency.put(extension_B, ImmutableList.of("d", "e"));
+    extensionToDependency.put(hadoop_client_2_3_0, ImmutableList.of("f", "g"));
+    extensionToDependency.put(
+        hadoop_client_2_4_0,
+        ImmutableList.of("h", "i", HADOOP_CLIENT_VULNERABLE_ARTIFACTID1, HADOOP_CLIENT_VULNERABLE_ARTIFACTID2)
+    );

-    rootExtensionsDir = new File(temporaryFolder.getRoot(), "extensions");
-    rootHadoopDependenciesDir = new File(temporaryFolder.getRoot(), "druid_hadoop_dependencies");
+    rootExtensionsDir = temporaryFolder.newFolder("extensions");
+    rootHadoopDependenciesDir = temporaryFolder.newFolder("druid_hadoop_dependencies");

     pullDependencies = new PullDependencies(
         new DefaultTeslaAether()
@@ -85,7 +111,10 @@ public class PullDependenciesTest
           @Override
           public List<Artifact> resolveArtifacts(DependencyRequest request)
           {
-            return getArtifactsForExtension(request.getCollectRequest().getRoot().getArtifact());
+            return getArtifactsForExtension(
+                request.getCollectRequest().getRoot().getArtifact(),
+                request.getFilter()
+            );
           }
         },
         new ExtensionsConfig()
@@ -101,7 +130,8 @@ public class PullDependenciesTest
           {
             return rootHadoopDependenciesDir.getAbsolutePath();
           }
-        }
+        },
+        HADOOP_EXCLUSIONS
     );

     pullDependencies.coordinates = ImmutableList.of(EXTENSION_A_COORDINATE, EXTENSION_B_COORDINATE);
@@ -109,47 +139,66 @@ public class PullDependenciesTest
         HADOOP_CLIENT_2_3_0_COORDINATE,
         HADOOP_CLIENT_2_4_0_COORDINATE
     );
+
+    // Because --clean is specified, pull-deps will first remove existing root extensions and hadoop dependencies
+    pullDependencies.clean = true;
   }

-  private List<Artifact> getArtifactsForExtension(Artifact artifact)
+  private List<Artifact> getArtifactsForExtension(Artifact artifact, DependencyFilter filter)
   {
-    final List<String> jarNames = extensionToJars.get(artifact);
+    final List<String> names = extensionToDependency.get(artifact);
     final List<Artifact> artifacts = new ArrayList<>();
-    for (String jarName : jarNames) {
-      final File jarFile = new File(localRepo, jarName);
+    for (String name : names) {
+      final File jarFile = new File(localRepo, name + ".jar");
       try {
         jarFile.createNewFile();
       }
       catch (IOException e) {
         throw new RuntimeException(e);
       }
-      artifacts.add(new DefaultArtifact(null, jarName, null, "jar", "1.0", null, jarFile));
+
+      DependencyNode node = new DefaultDependencyNode(
+          new Dependency(
+              new DefaultArtifact(DEPENDENCY_GROUPID, name, null, "jar", "1.0", null, jarFile),
+              "compile"
+          )
+      );
+      if (filter.accept(node, Collections.emptyList())) {
+        artifacts.add(node.getArtifact());
+      }
     }
     return artifacts;
   }

-  private File[] getExpectedJarFiles(Artifact artifact)
+  private List<File> getExpectedJarFiles(Artifact artifact)
   {
     final String artifactId = artifact.getArtifactId();
-    final List<String> jarNames = extensionToJars.get(artifact);
-    final File[] expectedJars = new File[jarNames.size()];
+    final List<String> names = extensionToDependency.get(artifact);
+    final List<File> expectedJars;
     if ("hadoop-client".equals(artifactId)) {
       final String version = artifact.getVersion();
-      for (int i = 0; i < jarNames.size(); ++i) {
-        expectedJars[i] = new File(
-            StringUtils.format(
-                "%s/%s/%s/%s",
-                rootHadoopDependenciesDir,
-                artifactId,
-                version,
-                jarNames.get(i)
-            )
-        );
-      }
+      expectedJars = names.stream()
+                          .filter(name -> !HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS.contains(name))
+                          .map(name -> new File(
+                              StringUtils.format(
+                                  "%s/%s/%s/%s",
+                                  rootHadoopDependenciesDir,
+                                  artifactId,
+                                  version,
+                                  name + ".jar"
+                              )
+                          ))
+                          .collect(Collectors.toList());
     } else {
-      for (int i = 0; i < jarNames.size(); ++i) {
-        expectedJars[i] = new File(StringUtils.format("%s/%s/%s", rootExtensionsDir, artifactId, jarNames.get(i)));
-      }
+      expectedJars = names.stream()
+                          .map(name -> new File(
+                              StringUtils.format("%s/%s/%s", rootExtensionsDir, artifactId, name + ".jar")
+                          ))
+                          .collect(Collectors.toList());
     }
     return expectedJars;
   }
@@ -160,7 +209,6 @@ public class PullDependenciesTest
   @Test()
   public void testPullDependencies_root_extension_dir_exists()
   {
-    rootExtensionsDir.mkdir();
     pullDependencies.run();
   }
@@ -170,6 +218,7 @@ public class PullDependenciesTest
   @Test(expected = RuntimeException.class)
   public void testPullDependencies_root_extension_dir_bad_state() throws IOException
   {
+    Assert.assertTrue(rootExtensionsDir.delete());
     Assert.assertTrue(rootExtensionsDir.createNewFile());
     pullDependencies.run();
   }
@@ -180,7 +229,6 @@ public class PullDependenciesTest
   @Test()
   public void testPullDependencies_root_hadoop_dependencies_dir_exists()
   {
-    rootHadoopDependenciesDir.mkdir();
     pullDependencies.run();
   }
@@ -190,6 +238,7 @@ public class PullDependenciesTest
   @Test(expected = RuntimeException.class)
   public void testPullDependencies_root_hadoop_dependencies_dir_bad_state() throws IOException
   {
+    Assert.assertTrue(rootHadoopDependenciesDir.delete());
     Assert.assertTrue(rootHadoopDependenciesDir.createNewFile());
     pullDependencies.run();
   }
@@ -197,11 +246,6 @@ public class PullDependenciesTest
   @Test
   public void testPullDependencies()
   {
-    rootExtensionsDir.mkdir();
-    rootHadoopDependenciesDir.mkdir();
-
-    // Because --clean is specified, pull-deps will first remove existing root extensions and hadoop dependencies
-    pullDependencies.clean = true;
-
     pullDependencies.run();

     final File[] actualExtensions = rootExtensionsDir.listFiles();
     Arrays.sort(actualExtensions);
@@ -209,13 +253,13 @@ public class PullDependenciesTest
     Assert.assertEquals(extension_A.getArtifactId(), actualExtensions[0].getName());
     Assert.assertEquals(extension_B.getArtifactId(), actualExtensions[1].getName());

-    final File[] jarsUnderExtensionA = actualExtensions[0].listFiles();
-    Arrays.sort(jarsUnderExtensionA);
-    Assert.assertArrayEquals(getExpectedJarFiles(extension_A), jarsUnderExtensionA);
+    final List<File> jarsUnderExtensionA = Arrays.asList(actualExtensions[0].listFiles());
+    Collections.sort(jarsUnderExtensionA);
+    Assert.assertEquals(getExpectedJarFiles(extension_A), jarsUnderExtensionA);

-    final File[] jarsUnderExtensionB = actualExtensions[1].listFiles();
-    Arrays.sort(jarsUnderExtensionB);
-    Assert.assertArrayEquals(getExpectedJarFiles(extension_B), jarsUnderExtensionB);
+    final List<File> jarsUnderExtensionB = Arrays.asList(actualExtensions[1].listFiles());
+    Collections.sort(jarsUnderExtensionB);
+    Assert.assertEquals(getExpectedJarFiles(extension_B), jarsUnderExtensionB);

     final File[] actualHadoopDependencies = rootHadoopDependenciesDir.listFiles();
     Arrays.sort(actualHadoopDependencies);
@@ -228,12 +272,31 @@ public class PullDependenciesTest
     Assert.assertEquals(hadoop_client_2_3_0.getVersion(), versionDirsUnderHadoopClient[0].getName());
     Assert.assertEquals(hadoop_client_2_4_0.getVersion(), versionDirsUnderHadoopClient[1].getName());

-    final File[] jarsUnder2_3_0 = versionDirsUnderHadoopClient[0].listFiles();
-    Arrays.sort(jarsUnder2_3_0);
-    Assert.assertArrayEquals(getExpectedJarFiles(hadoop_client_2_3_0), jarsUnder2_3_0);
+    final List<File> jarsUnder2_3_0 = Arrays.asList(versionDirsUnderHadoopClient[0].listFiles());
+    Collections.sort(jarsUnder2_3_0);
+    Assert.assertEquals(getExpectedJarFiles(hadoop_client_2_3_0), jarsUnder2_3_0);

-    final File[] jarsUnder2_4_0 = versionDirsUnderHadoopClient[1].listFiles();
-    Arrays.sort(jarsUnder2_4_0);
-    Assert.assertArrayEquals(getExpectedJarFiles(hadoop_client_2_4_0), jarsUnder2_4_0);
+    final List<File> jarsUnder2_4_0 = Arrays.asList(versionDirsUnderHadoopClient[1].listFiles());
+    Collections.sort(jarsUnder2_4_0);
+    Assert.assertEquals(getExpectedJarFiles(hadoop_client_2_4_0), jarsUnder2_4_0);
   }
+
+  @Test
+  public void testPullDependeciesExcludesHadoopSecurityVulnerabilities()
+  {
+    pullDependencies.run();
+
+    File hadoopClient240 = new File(
+        rootHadoopDependenciesDir,
+        Paths.get(hadoop_client_2_4_0.getArtifactId(), hadoop_client_2_4_0.getVersion()).toString()
+    );
+    Assert.assertTrue(hadoopClient240.exists());
+
+    List<String> dependencies = Arrays.stream(hadoopClient240.listFiles())
+                                      .map(File::getName)
+                                      .collect(Collectors.toList());
+    Assert.assertThat(dependencies, CoreMatchers.not(CoreMatchers.hasItem(HADOOP_CLIENT_VULNERABLE_JAR1)));
+    Assert.assertThat(dependencies, CoreMatchers.not(CoreMatchers.hasItem(HADOOP_CLIENT_VULNERABLE_JAR2)));
   }
 }