Exclude unneeded hadoop transitive dependencies (#8962)

* Exclude unneeded hadoop transitive dependencies

These dependencies are provided by core:
- com.squareup.okhttp:okhttp
- commons-beanutils:commons-beanutils
- org.apache.commons:commons-compress
- org.apache.zookeeper:zookeeper

These dependencies are not needed and are excluded because they contain
security vulnerabilities:
- commons-beanutils:commons-beanutils-core
- org.codehaus.jackson:jackson-mapper-asl

* Simplify exclusions + separate unneeded/vulnerable

* Do not exclude jackson-mapper-asl
Authored by Chi Cao Minh on 2019-12-02 16:08:21 -08:00; committed by Jonathan Wei
parent 6997b167b1
commit 4b7e79a4e6
5 changed files with 277 additions and 130 deletions
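
For illustration only (not part of the patch): a minimal, standalone Java sketch of the exclusion rule this change introduces, mirroring the coordinate lists above. The class name ExclusionSketch is hypothetical and the snippet uses JDK-only collections (Java 9+ Map.of/Set.of) rather than the Guava-backed Dependencies helper added in PullDependencies.java below; a dependency is skipped when its groupId:artifactId matches an entry, with "*" standing for every artifact in a group.

// Standalone sketch (hypothetical class, not Druid code): which hadoop
// transitive dependencies get skipped, based on the lists in the commit message.
import java.util.Map;
import java.util.Set;

public class ExclusionSketch
{
  // groupId -> excluded artifactIds; "*" would mean every artifact in the group
  private static final Map<String, Set<String>> HADOOP_EXCLUSIONS = Map.of(
      "com.squareup.okhttp", Set.of("okhttp"),
      "commons-beanutils", Set.of("commons-beanutils", "commons-beanutils-core"),
      "org.apache.commons", Set.of("commons-compress"),
      "org.apache.zookeeper", Set.of("zookeeper")
  );

  static boolean excluded(String groupId, String artifactId)
  {
    Set<String> artifactIds = HADOOP_EXCLUSIONS.getOrDefault(groupId, Set.of());
    return artifactIds.contains("*") || artifactIds.contains(artifactId);
  }

  public static void main(String[] args)
  {
    System.out.println(excluded("org.apache.zookeeper", "zookeeper"));           // true: provided by core, skipped
    System.out.println(excluded("commons-beanutils", "commons-beanutils-core")); // true: vulnerable, skipped
    System.out.println(excluded("org.apache.hadoop", "hadoop-common"));          // false: kept
  }
}
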

@@ -227,6 +227,11 @@
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
</exclusion>
<!-- Following are excluded to remove security vulnerabilities: -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils-core</artifactId>
</exclusion>
<exclusion>
<!-- excluded to remove security vulnerabilities; jackson-mapper-asl is renamed to jackson-databind -->
<groupId>org.codehaus.jackson</groupId>

@@ -137,6 +137,11 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
<!-- Following are excluded to remove security vulnerabilities: -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -245,6 +250,11 @@
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
</exclusion>
<!-- Following are excluded to remove security vulnerabilities: -->
<exclusion>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils-core</artifactId>
</exclusion>
<exclusion>
<!-- excluded to remove security vulnerabilities; jackson-mapper-asl is renamed to jackson-databind -->
<groupId>org.codehaus.jackson</groupId>

@@ -166,6 +166,11 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>

@@ -19,8 +19,11 @@
package org.apache.druid.cli;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import com.google.inject.Inject;
import io.airlift.airline.Command;
import io.airlift.airline.Option;
@@ -40,7 +43,6 @@ import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.DependencyFilter;
import org.eclipse.aether.graph.DependencyNode;
import org.eclipse.aether.repository.Authentication;
import org.eclipse.aether.repository.Proxy;
@@ -58,7 +60,6 @@ import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@@ -72,9 +73,18 @@ public class PullDependencies implements Runnable
{
private static final Logger log = new Logger(PullDependencies.class);
@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
private static final Set<String> EXCLUSIONS = new HashSet<>(
/*
private static final List<String> DEFAULT_REMOTE_REPOSITORIES = ImmutableList.of(
"https://repo1.maven.org/maven2/"
);
private static final Dependencies PROVIDED_BY_CORE_DEPENDENCIES =
Dependencies.builder()
.put("com.squareup.okhttp", "okhttp")
.put("commons-beanutils", "commons-beanutils")
.put("org.apache.commons", "commons-compress")
.put("org.apache.zookeeper", "zookeeper")
.build();
/*
// It is possible that extensions will pull down a lot of jars that are either
// duplicates OR conflict with druid jars. In that case, there are two problems that arise
@@ -107,7 +117,7 @@ public class PullDependencies implements Runnable
// Here is a list of dependencies extensions should probably exclude.
//
// Conflicts can be discovered using the following command on the distribution tarball:
// `find lib -iname *.jar | cut -d / -f 2 | sed -e 's/-[0-9]\.[0-9]/@/' | cut -f 1 -d @ | sort | uniq | xargs -I {} find extensions -name "*{}*.jar" | sort`
// `find lib -iname "*.jar" | cut -d / -f 2 | sed -e 's/-[0-9]\.[0-9]/@/' | cut -f 1 -d @ | sort | uniq | xargs -I {} find extensions -name "*{}*.jar" | sort`
"org.apache.druid",
"com.metamx.druid",
@@ -141,11 +151,13 @@ public class PullDependencies implements Runnable
"org.roaringbitmap",
"net.java.dev.jets3t"
*/
);
private static final List<String> DEFAULT_REMOTE_REPOSITORIES = ImmutableList.of(
"https://repo1.maven.org/maven2/"
);
private static final Dependencies SECURITY_VULNERABILITY_EXCLUSIONS =
Dependencies.builder()
.put("commons-beanutils", "commons-beanutils-core")
.build();
private final Dependencies hadoopExclusions;
private TeslaAether aether;
@@ -155,107 +167,104 @@ public class PullDependencies implements Runnable
@Option(
name = {"-c", "--coordinate"},
title = "coordinate",
description = "Extension coordinate to pull down, followed by a maven coordinate, e.g. org.apache.druid.extensions:mysql-metadata-storage",
required = false)
description = "Extension coordinate to pull down, followed by a maven coordinate, e.g. org.apache.druid.extensions:mysql-metadata-storage"
)
public List<String> coordinates = new ArrayList<>();
@Option(
name = {"-h", "--hadoop-coordinate"},
title = "hadoop coordinate",
description = "Hadoop dependency to pull down, followed by a maven coordinate, e.g. org.apache.hadoop:hadoop-client:2.4.0",
required = false)
description = "Hadoop dependency to pull down, followed by a maven coordinate, e.g. org.apache.hadoop:hadoop-client:2.4.0"
)
public List<String> hadoopCoordinates = new ArrayList<>();
@Option(
name = "--no-default-hadoop",
description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.5. If `-h` option is supplied, then default hadoop coordinate will not be downloaded.",
required = false)
description = "Don't pull down the default hadoop coordinate, i.e., org.apache.hadoop:hadoop-client:2.8.5. If `-h` option is supplied, then default hadoop coordinate will not be downloaded."
)
public boolean noDefaultHadoop = false;
@Option(
name = "--clean",
title = "Remove exisiting extension and hadoop dependencies directories before pulling down dependencies.",
required = false)
title = "Remove exisiting extension and hadoop dependencies directories before pulling down dependencies."
)
public boolean clean = false;
@Option(
name = {"-l", "--localRepository"},
title = "A local repository that Maven will use to put downloaded files. Then pull-deps will lay these files out into the extensions directory as needed.",
required = false
title = "A local repository that Maven will use to put downloaded files. Then pull-deps will lay these files out into the extensions directory as needed."
)
public String localRepository = StringUtils.format("%s/%s", System.getProperty("user.home"), ".m2/repository");
@Option(
name = {"-r", "--remoteRepository"},
title = "Add a remote repository. Unless --no-default-remote-repositories is provided, these will be used after https://repo1.maven.org/maven2/",
required = false
title = "Add a remote repository. Unless --no-default-remote-repositories is provided, these will be used after https://repo1.maven.org/maven2/"
)
List<String> remoteRepositories = new ArrayList<>();
@Option(
name = "--no-default-remote-repositories",
description = "Don't use the default remote repositories, only use the repositories provided directly via --remoteRepository",
required = false)
description = "Don't use the default remote repositories, only use the repositories provided directly via --remoteRepository"
)
public boolean noDefaultRemoteRepositories = false;
@Option(
name = {"-d", "--defaultVersion"},
title = "Version to use for extension artifacts without version information.",
required = false
title = "Version to use for extension artifacts without version information."
)
public String defaultVersion = PullDependencies.class.getPackage().getImplementationVersion();
@Option(
name = {"--use-proxy"},
title = "Use http/https proxy to pull dependencies.",
required = false
title = "Use http/https proxy to pull dependencies."
)
public boolean useProxy = false;
@Option(
name = {"--proxy-type"},
title = "The proxy type, should be either http or https",
required = false
title = "The proxy type, should be either http or https"
)
public String proxyType = "https";
@Option(
name = {"--proxy-host"},
title = "The proxy host",
required = false
title = "The proxy host"
)
public String proxyHost = "";
@Option(
name = {"--proxy-port"},
title = "The proxy port",
required = false
title = "The proxy port"
)
public int proxyPort = -1;
@Option(
name = {"--proxy-username"},
title = "The proxy username",
required = false
title = "The proxy username"
)
public String proxyUsername = "";
@Option(
name = {"--proxy-password"},
title = "The proxy password",
required = false
title = "The proxy password"
)
public String proxyPassword = "";
@SuppressWarnings("unused") // used by io.airlift:airline
public PullDependencies()
{
hadoopExclusions = Dependencies.builder()
.putAll(PROVIDED_BY_CORE_DEPENDENCIES)
.putAll(SECURITY_VULNERABILITY_EXCLUSIONS)
.build();
}
// Used for testing only
PullDependencies(TeslaAether aether, ExtensionsConfig extensionsConfig)
PullDependencies(TeslaAether aether, ExtensionsConfig extensionsConfig, Dependencies hadoopExclusions)
{
this.aether = aether;
this.extensionsConfig = extensionsConfig;
this.hadoopExclusions = hadoopExclusions;
}
@Override
@@ -315,7 +324,7 @@ public class PullDependencies implements Runnable
currExtensionDir = new File(currExtensionDir, versionedArtifact.getVersion());
createExtensionDirectory(hadoopCoordinate, currExtensionDir);
downloadExtension(versionedArtifact, currExtensionDir);
downloadExtension(versionedArtifact, currExtensionDir, hadoopExclusions);
}
log.info("Finish downloading dependencies for hadoop extension coordinates: [%s]", hadoopCoordinates);
}
@@ -349,6 +358,11 @@ public class PullDependencies implements Runnable
* @param toLocation The location where this extension will be downloaded to
*/
private void downloadExtension(Artifact versionedArtifact, File toLocation)
{
downloadExtension(versionedArtifact, toLocation, PROVIDED_BY_CORE_DEPENDENCIES);
}
private void downloadExtension(Artifact versionedArtifact, File toLocation, Dependencies exclusions)
{
final CollectRequest collectRequest = new CollectRequest();
collectRequest.setRoot(new Dependency(versionedArtifact, JavaScopes.RUNTIME));
@@ -356,41 +370,31 @@ public class PullDependencies implements Runnable
collectRequest,
DependencyFilterUtils.andFilter(
DependencyFilterUtils.classpathFilter(JavaScopes.RUNTIME),
new DependencyFilter()
{
@Override
public boolean accept(DependencyNode node, List<DependencyNode> parents)
{
String scope = node.getDependency().getScope();
if (scope != null) {
scope = StringUtils.toLowerCase(scope);
if ("provided".equals(scope)) {
return false;
}
if ("test".equals(scope)) {
return false;
}
if ("system".equals(scope)) {
return false;
}
}
if (accept(node.getArtifact())) {
(node, parents) -> {
String scope = node.getDependency().getScope();
if (scope != null) {
scope = StringUtils.toLowerCase(scope);
if ("provided".equals(scope)) {
return false;
}
for (DependencyNode parent : parents) {
if (accept(parent.getArtifact())) {
return false;
}
if ("test".equals(scope)) {
return false;
}
return true;
if ("system".equals(scope)) {
return false;
}
}
if (exclusions.contain(node.getArtifact())) {
return false;
}
private boolean accept(final Artifact artifact)
{
return EXCLUSIONS.contains(artifact.getGroupId());
for (DependencyNode parent : parents) {
if (exclusions.contain(parent.getArtifact())) {
return false;
}
}
return true;
}
)
);
@@ -400,11 +404,11 @@ public class PullDependencies implements Runnable
final List<Artifact> artifacts = aether.resolveArtifacts(dependencyRequest);
for (Artifact artifact : artifacts) {
if (!EXCLUSIONS.contains(artifact.getGroupId())) {
if (exclusions.contain(artifact)) {
log.debug("Skipped Artifact[%s]", artifact);
} else {
log.info("Adding file [%s] at [%s]", artifact.getFile().getName(), toLocation.getAbsolutePath());
org.apache.commons.io.FileUtils.copyFileToDirectory(artifact.getFile(), toLocation);
} else {
log.debug("Skipped Artifact[%s]", artifact);
}
}
}
@@ -514,15 +518,19 @@ public class PullDependencies implements Runnable
);
}
if (!StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTP) && !StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTPS)) {
if (!StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTP) &&
!StringUtils.toLowerCase(proxyType).equals(Proxy.TYPE_HTTPS)) {
throw new IllegalArgumentException("invalid proxy type: " + proxyType);
}
RepositorySystemSession repositorySystemSession = new RepositorySystemSessionProvider(new File(localRepository)).get();
RepositorySystemSession repositorySystemSession =
new RepositorySystemSessionProvider(new File(localRepository)).get();
List<RemoteRepository> rl = remoteRepositories.stream().map(r -> {
RemoteRepository.Builder builder = new RemoteRepository.Builder(r.getId(), "default", r.getUrl());
if (r.getUsername() != null && r.getPassword() != null) {
Authentication auth = new AuthenticationBuilder().addUsername(r.getUsername()).addPassword(r.getPassword()).build();
Authentication auth = new AuthenticationBuilder().addUsername(r.getUsername())
.addPassword(r.getPassword())
.build();
builder.setAuthentication(auth);
}
@@ -557,4 +565,60 @@ public class PullDependencies implements Runnable
);
}
}
@VisibleForTesting
static class Dependencies
{
private static final String ANY_ARTIFACT_ID = "*";
private final SetMultimap<String, String> groupIdToArtifactIds;
private Dependencies(Builder builder)
{
groupIdToArtifactIds = builder.groupIdToArtifactIdsBuilder.build();
}
boolean contain(Artifact artifact)
{
Set<String> artifactIds = groupIdToArtifactIds.get(artifact.getGroupId());
return artifactIds.contains(ANY_ARTIFACT_ID) || artifactIds.contains(artifact.getArtifactId());
}
static Builder builder()
{
return new Builder();
}
static final class Builder
{
private final ImmutableSetMultimap.Builder<String, String> groupIdToArtifactIdsBuilder =
ImmutableSetMultimap.builder();
private Builder()
{
}
Builder putAll(Dependencies dependencies)
{
groupIdToArtifactIdsBuilder.putAll(dependencies.groupIdToArtifactIds);
return this;
}
Builder put(String groupId)
{
return put(groupId, ANY_ARTIFACT_ID);
}
Builder put(String groupId, String artifactId)
{
groupIdToArtifactIdsBuilder.put(groupId, artifactId);
return this;
}
Dependencies build()
{
return new Dependencies(this);
}
}
}
}
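
For illustration only: a hedged usage sketch of the Dependencies helper added above. Dependencies and its Builder are package-private and @VisibleForTesting, so the snippet assumes it lives in the org.apache.druid.cli package; the class name DependenciesUsageSketch and the version strings are hypothetical. put(groupId, artifactId) excludes one coordinate, put(groupId) excludes the whole group, and contain() matches on groupId plus artifactId only.

// Sketch only (hypothetical snippet placed in the same package as PullDependencies).
package org.apache.druid.cli;

import org.eclipse.aether.artifact.DefaultArtifact;

class DependenciesUsageSketch
{
  public static void main(String[] args)
  {
    PullDependencies.Dependencies exclusions =
        PullDependencies.Dependencies.builder()
                                     .put("org.apache.zookeeper", "zookeeper") // single coordinate
                                     .put("org.apache.druid")                  // entire group ("*")
                                     .build();

    // DefaultArtifact(groupId, artifactId, extension, version)
    System.out.println(exclusions.contain(
        new DefaultArtifact("org.apache.zookeeper", "zookeeper", "jar", "3.4.14"))); // true: excluded
    System.out.println(exclusions.contain(
        new DefaultArtifact("org.apache.druid", "druid-core", "jar", "0.17.0")));    // true: whole group excluded
    System.out.println(exclusions.contain(
        new DefaultArtifact("org.apache.hadoop", "hadoop-common", "jar", "2.8.5"))); // false: kept
  }
}
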

@@ -20,12 +20,18 @@
package org.apache.druid.cli;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.tesla.aether.internal.DefaultTeslaAether;
import org.apache.druid.guice.ExtensionsConfig;
import org.apache.druid.java.util.common.StringUtils;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.graph.DefaultDependencyNode;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.DependencyFilter;
import org.eclipse.aether.graph.DependencyNode;
import org.eclipse.aether.resolution.DependencyRequest;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
@@ -34,21 +40,38 @@ import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
*/
public class PullDependenciesTest
{
private static final String EXTENSION_A_COORDINATE = "groupX:extension_A:123";
private static final String EXTENSION_B_COORDINATE = "groupY:extension_B:456";
private static final String HADOOP_CLIENT_2_3_0_COORDINATE = "org.apache.hadoop:hadoop-client:2.3.0";
private static final String HADOOP_CLIENT_2_4_0_COORDINATE = "org.apache.hadoop:hadoop-client:2.4.0";
private static final String DEPENDENCY_GROUPID = "groupid";
private static final String HADOOP_CLIENT_VULNERABLE_ARTIFACTID1 = "vulnerable1";
private static final String HADOOP_CLIENT_VULNERABLE_ARTIFACTID2 = "vulnerable2";
private static final Set<String> HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS = ImmutableSet.of(
HADOOP_CLIENT_VULNERABLE_ARTIFACTID1,
HADOOP_CLIENT_VULNERABLE_ARTIFACTID2
);
private static final String HADOOP_CLIENT_VULNERABLE_JAR1 = HADOOP_CLIENT_VULNERABLE_ARTIFACTID1 + ".jar";
private static final String HADOOP_CLIENT_VULNERABLE_JAR2 = HADOOP_CLIENT_VULNERABLE_ARTIFACTID2 + ".jar";
private static final PullDependencies.Dependencies HADOOP_EXCLUSIONS =
PullDependencies.Dependencies.builder()
.put(DEPENDENCY_GROUPID, HADOOP_CLIENT_VULNERABLE_ARTIFACTID1)
.put(DEPENDENCY_GROUPID, HADOOP_CLIENT_VULNERABLE_ARTIFACTID2)
.build();
@Rule
public final TemporaryFolder temporaryFolder = new TemporaryFolder();
@@ -63,21 +86,24 @@ public class PullDependenciesTest
private File rootExtensionsDir;
private File rootHadoopDependenciesDir;
private HashMap<Artifact, List<String>> extensionToJars; // map Artifact to its associated jars' names
private Map<Artifact, List<String>> extensionToDependency;
@Before
public void setUp() throws Exception
{
localRepo = temporaryFolder.newFolder();
extensionToJars = new HashMap<>();
localRepo = temporaryFolder.newFolder("local_repo");
extensionToDependency = new HashMap<>();
extensionToJars.put(extension_A, ImmutableList.of("a.jar", "b.jar", "c.jar"));
extensionToJars.put(extension_B, ImmutableList.of("d.jar", "e.jar"));
extensionToJars.put(hadoop_client_2_3_0, ImmutableList.of("f.jar", "g.jar"));
extensionToJars.put(hadoop_client_2_4_0, ImmutableList.of("h.jar", "i.jar"));
extensionToDependency.put(extension_A, ImmutableList.of("a", "b", "c"));
extensionToDependency.put(extension_B, ImmutableList.of("d", "e"));
extensionToDependency.put(hadoop_client_2_3_0, ImmutableList.of("f", "g"));
extensionToDependency.put(
hadoop_client_2_4_0,
ImmutableList.of("h", "i", HADOOP_CLIENT_VULNERABLE_ARTIFACTID1, HADOOP_CLIENT_VULNERABLE_ARTIFACTID2)
);
rootExtensionsDir = new File(temporaryFolder.getRoot(), "extensions");
rootHadoopDependenciesDir = new File(temporaryFolder.getRoot(), "druid_hadoop_dependencies");
rootExtensionsDir = temporaryFolder.newFolder("extensions");
rootHadoopDependenciesDir = temporaryFolder.newFolder("druid_hadoop_dependencies");
pullDependencies = new PullDependencies(
new DefaultTeslaAether()
@@ -85,7 +111,10 @@ public class PullDependenciesTest
@Override
public List<Artifact> resolveArtifacts(DependencyRequest request)
{
return getArtifactsForExtension(request.getCollectRequest().getRoot().getArtifact());
return getArtifactsForExtension(
request.getCollectRequest().getRoot().getArtifact(),
request.getFilter()
);
}
},
new ExtensionsConfig()
@@ -101,7 +130,8 @@ public class PullDependenciesTest
{
return rootHadoopDependenciesDir.getAbsolutePath();
}
}
},
HADOOP_EXCLUSIONS
);
pullDependencies.coordinates = ImmutableList.of(EXTENSION_A_COORDINATE, EXTENSION_B_COORDINATE);
@@ -109,47 +139,66 @@ public class PullDependenciesTest
HADOOP_CLIENT_2_3_0_COORDINATE,
HADOOP_CLIENT_2_4_0_COORDINATE
);
// Because --clean is specified, pull-deps will first remove existing root extensions and hadoop dependencies
pullDependencies.clean = true;
}
private List<Artifact> getArtifactsForExtension(Artifact artifact)
private List<Artifact> getArtifactsForExtension(Artifact artifact, DependencyFilter filter)
{
final List<String> jarNames = extensionToJars.get(artifact);
final List<String> names = extensionToDependency.get(artifact);
final List<Artifact> artifacts = new ArrayList<>();
for (String jarName : jarNames) {
final File jarFile = new File(localRepo, jarName);
for (String name : names) {
final File jarFile = new File(localRepo, name + ".jar");
try {
jarFile.createNewFile();
}
catch (IOException e) {
throw new RuntimeException(e);
}
artifacts.add(new DefaultArtifact(null, jarName, null, "jar", "1.0", null, jarFile));
DependencyNode node = new DefaultDependencyNode(
new Dependency(
new DefaultArtifact(DEPENDENCY_GROUPID, name, null, "jar", "1.0", null, jarFile),
"compile"
)
);
if (filter.accept(node, Collections.emptyList())) {
artifacts.add(node.getArtifact());
}
}
return artifacts;
}
private File[] getExpectedJarFiles(Artifact artifact)
private List<File> getExpectedJarFiles(Artifact artifact)
{
final String artifactId = artifact.getArtifactId();
final List<String> jarNames = extensionToJars.get(artifact);
final File[] expectedJars = new File[jarNames.size()];
final List<String> names = extensionToDependency.get(artifact);
final List<File> expectedJars;
if ("hadoop-client".equals(artifactId)) {
final String version = artifact.getVersion();
for (int i = 0; i < jarNames.size(); ++i) {
expectedJars[i] = new File(
StringUtils.format(
"%s/%s/%s/%s",
rootHadoopDependenciesDir,
artifactId,
version,
jarNames.get(i)
)
);
}
expectedJars = names.stream()
.filter(name -> !HADOOP_CLIENT_VULNERABLE_ARTIFACTIDS.contains(name))
.map(name -> new File(
StringUtils.format(
"%s/%s/%s/%s",
rootHadoopDependenciesDir,
artifactId,
version,
name + ".jar"
)
))
.collect(Collectors.toList());
} else {
for (int i = 0; i < jarNames.size(); ++i) {
expectedJars[i] = new File(StringUtils.format("%s/%s/%s", rootExtensionsDir, artifactId, jarNames.get(i)));
}
expectedJars = names.stream()
.map(name -> new File(
StringUtils.format(
"%s/%s/%s",
rootExtensionsDir,
artifactId,
name + ".jar"
)))
.collect(Collectors.toList());
}
return expectedJars;
}
@@ -160,7 +209,6 @@ public class PullDependenciesTest
@Test()
public void testPullDependencies_root_extension_dir_exists()
{
rootExtensionsDir.mkdir();
pullDependencies.run();
}
@@ -170,6 +218,7 @@ public class PullDependenciesTest
@Test(expected = RuntimeException.class)
public void testPullDependencies_root_extension_dir_bad_state() throws IOException
{
Assert.assertTrue(rootExtensionsDir.delete());
Assert.assertTrue(rootExtensionsDir.createNewFile());
pullDependencies.run();
}
@@ -180,7 +229,6 @@ public class PullDependenciesTest
@Test()
public void testPullDependencies_root_hadoop_dependencies_dir_exists()
{
rootHadoopDependenciesDir.mkdir();
pullDependencies.run();
}
@@ -190,6 +238,7 @@ public class PullDependenciesTest
@Test(expected = RuntimeException.class)
public void testPullDependencies_root_hadoop_dependencies_dir_bad_state() throws IOException
{
Assert.assertTrue(rootHadoopDependenciesDir.delete());
Assert.assertTrue(rootHadoopDependenciesDir.createNewFile());
pullDependencies.run();
}
@@ -197,11 +246,6 @@ public class PullDependenciesTest
@Test
public void testPullDependencies()
{
rootExtensionsDir.mkdir();
rootHadoopDependenciesDir.mkdir();
// Because --clean is specified, pull-deps will first remove existing root extensions and hadoop dependencies
pullDependencies.clean = true;
pullDependencies.run();
final File[] actualExtensions = rootExtensionsDir.listFiles();
Arrays.sort(actualExtensions);
@@ -209,13 +253,13 @@ public class PullDependenciesTest
Assert.assertEquals(extension_A.getArtifactId(), actualExtensions[0].getName());
Assert.assertEquals(extension_B.getArtifactId(), actualExtensions[1].getName());
final File[] jarsUnderExtensionA = actualExtensions[0].listFiles();
Arrays.sort(jarsUnderExtensionA);
Assert.assertArrayEquals(getExpectedJarFiles(extension_A), jarsUnderExtensionA);
final List<File> jarsUnderExtensionA = Arrays.asList(actualExtensions[0].listFiles());
Collections.sort(jarsUnderExtensionA);
Assert.assertEquals(getExpectedJarFiles(extension_A), jarsUnderExtensionA);
final File[] jarsUnderExtensionB = actualExtensions[1].listFiles();
Arrays.sort(jarsUnderExtensionB);
Assert.assertArrayEquals(getExpectedJarFiles(extension_B), jarsUnderExtensionB);
final List<File> jarsUnderExtensionB = Arrays.asList(actualExtensions[1].listFiles());
Collections.sort(jarsUnderExtensionB);
Assert.assertEquals(getExpectedJarFiles(extension_B), jarsUnderExtensionB);
final File[] actualHadoopDependencies = rootHadoopDependenciesDir.listFiles();
Arrays.sort(actualHadoopDependencies);
@@ -228,12 +272,31 @@ public class PullDependenciesTest
Assert.assertEquals(hadoop_client_2_3_0.getVersion(), versionDirsUnderHadoopClient[0].getName());
Assert.assertEquals(hadoop_client_2_4_0.getVersion(), versionDirsUnderHadoopClient[1].getName());
final File[] jarsUnder2_3_0 = versionDirsUnderHadoopClient[0].listFiles();
Arrays.sort(jarsUnder2_3_0);
Assert.assertArrayEquals(getExpectedJarFiles(hadoop_client_2_3_0), jarsUnder2_3_0);
final List<File> jarsUnder2_3_0 = Arrays.asList(versionDirsUnderHadoopClient[0].listFiles());
Collections.sort(jarsUnder2_3_0);
Assert.assertEquals(getExpectedJarFiles(hadoop_client_2_3_0), jarsUnder2_3_0);
final File[] jarsUnder2_4_0 = versionDirsUnderHadoopClient[1].listFiles();
Arrays.sort(jarsUnder2_4_0);
Assert.assertArrayEquals(getExpectedJarFiles(hadoop_client_2_4_0), jarsUnder2_4_0);
final List<File> jarsUnder2_4_0 = Arrays.asList(versionDirsUnderHadoopClient[1].listFiles());
Collections.sort(jarsUnder2_4_0);
Assert.assertEquals(getExpectedJarFiles(hadoop_client_2_4_0), jarsUnder2_4_0);
}
@Test
public void testPullDependeciesExcludesHadoopSecurityVulnerabilities()
{
pullDependencies.run();
File hadoopClient240 = new File(
rootHadoopDependenciesDir,
Paths.get(hadoop_client_2_4_0.getArtifactId(), hadoop_client_2_4_0.getVersion())
.toString()
);
Assert.assertTrue(hadoopClient240.exists());
List<String> dependencies = Arrays.stream(hadoopClient240.listFiles())
.map(File::getName)
.collect(Collectors.toList());
Assert.assertThat(dependencies, CoreMatchers.not(CoreMatchers.hasItem(HADOOP_CLIENT_VULNERABLE_JAR1)));
Assert.assertThat(dependencies, CoreMatchers.not(CoreMatchers.hasItem(HADOOP_CLIENT_VULNERABLE_JAR2)));
}
}