/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

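// Overview (added note): this build bundles the Hadoop 2 client with the repository-hdfs plugin and runs the
// REST tests against a MiniHDFS fixture; a Kerberos-secured fixture is wired up further down but currently disabled.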
esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

versions << [
  'hadoop2': '2.7.1'
]

configurations {
  hdfsFixture
}

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
  compile 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile 'commons-cli:commons-cli:1.2'
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')
}

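// Note (assumption): the mapping below groups every hadoop-* jar under a single shared 'hadoop' license entry.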
dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

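// Forks a MiniHDFS process (classpath from :test:fixtures:hdfs-fixture) that the hdfs_repository REST tests run against.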
task hdfsFixture(type: org.elasticsearch.gradle.test.AntFixture) {
  dependsOn project.configurations.hdfsFixture
  executable = new File(project.javaHome, 'bin/java')
  env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
  args 'hdfs.MiniHDFS',
       baseDir
}

// MIT Kerberos Vagrant Testing Fixture
String box = "krb5kdc"
Map<String,String> vagrantEnvVars = [
  'VAGRANT_CWD' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}",
  'VAGRANT_VAGRANTFILE' : 'Vagrantfile',
  'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}"
]

task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
  command 'box'
  subcommand 'update'
  boxName box
  environmentVars vagrantEnvVars
}

task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) {
  command 'up'
  args '--provision', '--provider', 'virtualbox'
  boxName box
  environmentVars vagrantEnvVars
  dependsOn krb5kdcUpdate
}

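// Aggregate task: each addPrincipal#<principal> task created in the loop below is attached to it as a dependency.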
task krb5AddPrincipals {
  dependsOn krb5kdcFixture
}

List<String> principals = [ "elasticsearch", "hdfs/hdfs.build.elastic.co" ]
String realm = "BUILD.ELASTIC.CO"

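// For each principal, create a vagrant ssh task that registers it with the KDC box and hook it into krb5AddPrincipals.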
for (String principal : principals) {
  Task create = project.tasks.create("addPrincipal#${principal}", org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
    command 'ssh'
    args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal"
    boxName box
    environmentVars vagrantEnvVars
    dependsOn krb5kdcFixture
  }
  krb5AddPrincipals.dependsOn(create)
}

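// Same MiniHDFS fixture as above, but started with the Kerberos principal, keytab, and krb5.conf produced by the
// krb5kdc-fixture so it runs secured.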
task secureHdfsFixture(type: org.elasticsearch.gradle.test.AntFixture) {
  dependsOn project.configurations.hdfsFixture, krb5kdcFixture, krb5AddPrincipals
  executable = new File(project.javaHome, 'bin/java')
  env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"

  Path keytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab")
  Path krb5Config = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf")

  args "-Djava.security.krb5.conf=${krb5Config}", 'hdfs.MiniHDFS',
       baseDir,
       "hdfs/hdfs.build.elastic.co@${realm}",
       "${keytabPath}"
}

boolean fixtureSupported = false;
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath);
    if (Files.isDirectory(path) &&
        Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
        Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
        Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
    }
  }
} else {
  fixtureSupported = true
}

if (fixtureSupported) {
  integTestCluster.dependsOn hdfsFixture
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository'
} else {
  logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  // just tests that the plugin loads
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
}

boolean secureFixtureSupported = false;
if (fixtureSupported) {
  // Only do secure fixture support if the regular fixture is supported,
  // and if vagrant is installed. The ignoreExitValue on exec only matters
  // in cases where the command can be found and successfully started. In
  // situations where the vagrant command isn't able to be started at all
  // (it's not installed) then Gradle still throws ExecException.
  ByteArrayOutputStream pipe = new ByteArrayOutputStream()
  try {
    ExecResult runResult = exec {
      commandLine 'vagrant', '--version'
      standardOutput pipe
      ignoreExitValue true
    }
    String output = pipe.toString().trim()
    if (runResult.exitValue == 0) {
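      // the secure fixture needs Vagrant 1.8.6+; this pattern accepts 1.8.6 through 1.8.9 and 1.9.x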
      secureFixtureSupported = (output ==~ /Vagrant 1\.(8\.[6-9]|9\.[0-9])+/)
    } else {
      logger.warn("Could not read installed vagrant version:\n" + output)
    }
  } catch (org.gradle.process.internal.ExecException e) {
    logger.warn("Could not find vagrant: " + e.message)
    // Swallow error. Vagrant isn't installed. Leave secure fixture support off.
  }
}

// Create an integration test suite just for security-based tests
if (secureFixtureSupported && false) { // This fails due to a vagrant configuration issue - remove the false check to re-enable
  // This must execute before the afterEvaluate block from integTestSecure
  project.afterEvaluate {
    Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath()
    Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()

    project.integTestSecureCluster.dependsOn(project.bundlePlugin)
    project.integTestSecure.clusterConfig.plugin(project.path)
    project.integTestSecure.clusterConfig.extraConfigFile("repository-hdfs/krb5.keytab", "${elasticsearchKT}")
    project.integTestSecure.clusterConfig.jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
      " " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
      " " + "-Djava.security.krb5.conf=${krb5conf}" +
      " " + System.getProperty('tests.jvm.argline', '')
  }

  RestIntegTestTask integTestSecure = project.tasks.create('integTestSecure', RestIntegTestTask.class) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS secured by MIT Kerberos."
  }

  integTestSecure.mustRunAfter(project.integTest)
  project.check.dependsOn(integTestSecure)

  // Fixture dependencies
  integTestSecureCluster.dependsOn secureHdfsFixture, krb5kdcFixture
  integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
} else {
  logger.warn("secured hdfsFixture is unsupported, please install Vagrant 1.8.6+ to enable")
}

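// Note (added): thirdPartyAudit reports classes that the bundled jars reference but that are not on the classpath,
// as well as uses of JDK-internal APIs; everything listed below is a known, accepted case.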
thirdPartyAudit.excludes = [
  // classes are missing, because we added hadoop jars one by one until tests pass.
  'com.google.gson.stream.JsonReader',
  'com.google.gson.stream.JsonWriter',
  'com.jcraft.jsch.ChannelExec',
  'com.jcraft.jsch.JSch',
  'com.jcraft.jsch.Logger',
  'com.jcraft.jsch.Session',
  'com.sun.jersey.api.ParamException',
  'com.sun.jersey.api.core.HttpContext',
  'com.sun.jersey.core.spi.component.ComponentContext',
  'com.sun.jersey.core.spi.component.ComponentScope',
  'com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable',
  'com.sun.jersey.spi.container.ContainerRequest',
  'com.sun.jersey.spi.container.ContainerRequestFilter',
  'com.sun.jersey.spi.container.ContainerResponseFilter',
  'com.sun.jersey.spi.container.ResourceFilter',
  'com.sun.jersey.spi.container.servlet.ServletContainer',
  'com.sun.jersey.spi.inject.Injectable',
  'com.sun.jersey.spi.inject.InjectableProvider',
  'io.netty.bootstrap.Bootstrap',
  'io.netty.bootstrap.ChannelFactory',
  'io.netty.bootstrap.ServerBootstrap',
  'io.netty.buffer.ByteBuf',
  'io.netty.buffer.Unpooled',
  'io.netty.channel.Channel',
  'io.netty.channel.ChannelFuture',
  'io.netty.channel.ChannelFutureListener',
  'io.netty.channel.ChannelHandler',
  'io.netty.channel.ChannelHandlerContext',
  'io.netty.channel.ChannelInboundHandlerAdapter',
  'io.netty.channel.ChannelInitializer',
  'io.netty.channel.ChannelPipeline',
  'io.netty.channel.EventLoopGroup',
  'io.netty.channel.SimpleChannelInboundHandler',
  'io.netty.channel.group.ChannelGroup',
  'io.netty.channel.group.ChannelGroupFuture',
  'io.netty.channel.group.DefaultChannelGroup',
  'io.netty.channel.nio.NioEventLoopGroup',
  'io.netty.channel.socket.SocketChannel',
  'io.netty.channel.socket.nio.NioServerSocketChannel',
  'io.netty.channel.socket.nio.NioSocketChannel',
  'io.netty.handler.codec.http.DefaultFullHttpRequest',
  'io.netty.handler.codec.http.DefaultFullHttpResponse',
  'io.netty.handler.codec.http.DefaultHttpResponse',
  'io.netty.handler.codec.http.HttpContent',
  'io.netty.handler.codec.http.HttpHeaders',
  'io.netty.handler.codec.http.HttpMethod',
  'io.netty.handler.codec.http.HttpRequest',
  'io.netty.handler.codec.http.HttpRequestDecoder',
  'io.netty.handler.codec.http.HttpRequestEncoder',
  'io.netty.handler.codec.http.HttpResponseEncoder',
  'io.netty.handler.codec.http.HttpResponseStatus',
  'io.netty.handler.codec.http.HttpVersion',
  'io.netty.handler.codec.http.QueryStringDecoder',
  'io.netty.handler.codec.string.StringEncoder',
  'io.netty.handler.ssl.SslHandler',
  'io.netty.handler.stream.ChunkedStream',
  'io.netty.handler.stream.ChunkedWriteHandler',
  'io.netty.util.concurrent.GlobalEventExecutor',
  'javax.ws.rs.core.Context',
  'javax.ws.rs.core.MediaType',
  'javax.ws.rs.core.MultivaluedMap',
  'javax.ws.rs.core.Response$ResponseBuilder',
  'javax.ws.rs.core.Response$Status',
  'javax.ws.rs.core.Response',
  'javax.ws.rs.core.StreamingOutput',
  'javax.ws.rs.core.UriBuilder',
  'javax.ws.rs.ext.ExceptionMapper',
  'jdiff.JDiff',
  'org.apache.avalon.framework.logger.Logger',
  'org.apache.avro.Schema',
  'org.apache.avro.file.DataFileReader',
  'org.apache.avro.file.FileReader',
  'org.apache.avro.file.SeekableInput',
  'org.apache.avro.generic.GenericDatumReader',
  'org.apache.avro.generic.GenericDatumWriter',
  'org.apache.avro.io.BinaryDecoder',
  'org.apache.avro.io.BinaryEncoder',
  'org.apache.avro.io.DatumReader',
  'org.apache.avro.io.DatumWriter',
  'org.apache.avro.io.DecoderFactory',
  'org.apache.avro.io.EncoderFactory',
  'org.apache.avro.io.JsonEncoder',
  'org.apache.avro.reflect.ReflectData',
  'org.apache.avro.reflect.ReflectDatumReader',
  'org.apache.avro.reflect.ReflectDatumWriter',
  'org.apache.avro.specific.SpecificDatumReader',
  'org.apache.avro.specific.SpecificDatumWriter',
  'org.apache.avro.specific.SpecificRecord',
  'org.apache.commons.beanutils.BeanUtils',
  'org.apache.commons.beanutils.DynaBean',
  'org.apache.commons.beanutils.DynaClass',
  'org.apache.commons.beanutils.DynaProperty',
  'org.apache.commons.beanutils.PropertyUtils',
  'org.apache.commons.compress.archivers.tar.TarArchiveEntry',
  'org.apache.commons.compress.archivers.tar.TarArchiveInputStream',
  'org.apache.commons.codec.DecoderException',
  'org.apache.commons.codec.binary.Base64',
  'org.apache.commons.codec.binary.Hex',
  'org.apache.commons.codec.digest.DigestUtils',
  'org.apache.commons.daemon.Daemon',
  'org.apache.commons.daemon.DaemonContext',
  'org.apache.commons.digester.AbstractObjectCreationFactory',
  'org.apache.commons.digester.CallMethodRule',
  'org.apache.commons.digester.Digester',
  'org.apache.commons.digester.ObjectCreationFactory',
  'org.apache.commons.digester.substitution.MultiVariableExpander',
  'org.apache.commons.digester.substitution.VariableSubstitutor',
  'org.apache.commons.digester.xmlrules.DigesterLoader',
  'org.apache.commons.httpclient.util.URIUtil',
  'org.apache.commons.jxpath.JXPathContext',
  'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl',
  'org.apache.commons.jxpath.ri.QName',
  'org.apache.commons.jxpath.ri.compiler.NodeNameTest',
  'org.apache.commons.jxpath.ri.compiler.NodeTest',
  'org.apache.commons.jxpath.ri.compiler.NodeTypeTest',
  'org.apache.commons.jxpath.ri.model.NodeIterator',
  'org.apache.commons.jxpath.ri.model.NodePointer',
  'org.apache.commons.jxpath.ri.model.NodePointerFactory',
  'org.apache.commons.math3.util.ArithmeticUtils',
  'org.apache.commons.net.ftp.FTPClient',
  'org.apache.commons.net.ftp.FTPFile',
  'org.apache.commons.net.ftp.FTPReply',
  'org.apache.commons.net.util.SubnetUtils$SubnetInfo',
  'org.apache.commons.net.util.SubnetUtils',
  'org.apache.curator.ensemble.fixed.FixedEnsembleProvider',
  'org.apache.curator.framework.CuratorFramework',
  'org.apache.curator.framework.CuratorFrameworkFactory$Builder',
  'org.apache.curator.framework.CuratorFrameworkFactory',
  'org.apache.curator.framework.api.ACLBackgroundPathAndBytesable',
  'org.apache.curator.framework.api.ACLProvider',
  'org.apache.curator.framework.api.BackgroundPathAndBytesable',
  'org.apache.curator.framework.api.ChildrenDeletable',
  'org.apache.curator.framework.api.CreateBuilder',
  'org.apache.curator.framework.api.DeleteBuilder',
  'org.apache.curator.framework.api.ExistsBuilder',
  'org.apache.curator.framework.api.GetChildrenBuilder',
  'org.apache.curator.framework.api.GetDataBuilder',
  'org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable',
  'org.apache.curator.framework.api.SetDataBuilder',
  'org.apache.curator.framework.api.WatchPathable',
  'org.apache.curator.framework.imps.DefaultACLProvider',
  'org.apache.curator.framework.listen.ListenerContainer',
  'org.apache.curator.framework.recipes.cache.ChildData',
  'org.apache.curator.framework.recipes.cache.PathChildrenCache$StartMode',
  'org.apache.curator.framework.recipes.cache.PathChildrenCache',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent$Type',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheListener',
  'org.apache.curator.framework.recipes.locks.Reaper$Mode',
  'org.apache.curator.framework.recipes.locks.Reaper',
  'org.apache.curator.framework.recipes.shared.SharedCount',
  'org.apache.curator.framework.recipes.shared.VersionedValue',
  'org.apache.curator.retry.ExponentialBackoffRetry',
  'org.apache.curator.retry.RetryNTimes',
  'org.apache.curator.utils.CloseableScheduledExecutorService',
  'org.apache.curator.utils.CloseableUtils',
  'org.apache.curator.utils.EnsurePath',
  'org.apache.curator.utils.PathUtils',
  'org.apache.curator.utils.ThreadUtils',
  'org.apache.curator.utils.ZKPaths',
  'org.apache.directory.server.kerberos.shared.keytab.Keytab',
  'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry',
  'org.apache.http.NameValuePair',
  'org.apache.http.client.utils.URIBuilder',
  'org.apache.http.client.utils.URLEncodedUtils',
  'org.apache.log.Hierarchy',
  'org.apache.log.Logger',
  'org.apache.tools.ant.BuildException',
  'org.apache.tools.ant.DirectoryScanner',
  'org.apache.tools.ant.Task',
  'org.apache.tools.ant.taskdefs.Execute',
  'org.apache.tools.ant.types.FileSet',
  'org.apache.xml.serialize.OutputFormat',
  'org.apache.xml.serialize.XMLSerializer',
  'org.apache.zookeeper.AsyncCallback$StatCallback',
  'org.apache.zookeeper.AsyncCallback$StringCallback',
  'org.apache.zookeeper.CreateMode',
  'org.apache.zookeeper.KeeperException$Code',
  'org.apache.zookeeper.KeeperException',
  'org.apache.zookeeper.WatchedEvent',
  'org.apache.zookeeper.Watcher$Event$EventType',
  'org.apache.zookeeper.Watcher$Event$KeeperState',
  'org.apache.zookeeper.Watcher',
  'org.apache.zookeeper.ZKUtil',
  'org.apache.zookeeper.ZooDefs$Ids',
  'org.apache.zookeeper.ZooKeeper',
  'org.apache.zookeeper.data.ACL',
  'org.apache.zookeeper.data.Id',
  'org.apache.zookeeper.data.Stat',
  'org.codehaus.jackson.JsonEncoding',
  'org.codehaus.jackson.JsonFactory',
  'org.codehaus.jackson.JsonGenerator',
  'org.codehaus.jackson.JsonGenerator$Feature',
  'org.codehaus.jackson.JsonNode',
  'org.codehaus.jackson.map.MappingJsonFactory',
  'org.codehaus.jackson.map.ObjectMapper',
  'org.codehaus.jackson.map.ObjectReader',
  'org.codehaus.jackson.map.ObjectWriter',
  'org.codehaus.jackson.node.ContainerNode',
  'org.codehaus.jackson.type.TypeReference',
  'org.codehaus.jackson.util.MinimalPrettyPrinter',
  'org.fusesource.leveldbjni.JniDBFactory',
  'org.iq80.leveldb.DB',
  'org.iq80.leveldb.Options',
  'org.iq80.leveldb.WriteBatch',
  'org.mortbay.jetty.Connector',
  'org.mortbay.jetty.Handler',
  'org.mortbay.jetty.InclusiveByteRange',
  'org.mortbay.jetty.MimeTypes',
  'org.mortbay.jetty.NCSARequestLog',
  'org.mortbay.jetty.RequestLog',
  'org.mortbay.jetty.Server',
  'org.mortbay.jetty.handler.ContextHandler$SContext',
  'org.mortbay.jetty.handler.ContextHandler',
  'org.mortbay.jetty.handler.ContextHandlerCollection',
  'org.mortbay.jetty.handler.HandlerCollection',
  'org.mortbay.jetty.handler.RequestLogHandler',
  'org.mortbay.jetty.nio.SelectChannelConnector',
  'org.mortbay.jetty.security.SslSocketConnector',
  'org.mortbay.jetty.servlet.AbstractSessionManager',
  'org.mortbay.jetty.servlet.Context',
  'org.mortbay.jetty.servlet.DefaultServlet',
  'org.mortbay.jetty.servlet.FilterHolder',
  'org.mortbay.jetty.servlet.FilterMapping',
  'org.mortbay.jetty.servlet.ServletHandler',
  'org.mortbay.jetty.servlet.ServletHolder',
  'org.mortbay.jetty.servlet.SessionHandler',
  'org.mortbay.jetty.webapp.WebAppContext',
  'org.mortbay.log.Log',
  'org.mortbay.thread.QueuedThreadPool',
  'org.mortbay.util.MultiException',
  'org.mortbay.util.ajax.JSON$Convertible',
  'org.mortbay.util.ajax.JSON$Output',
  'org.mortbay.util.ajax.JSON',
  'org.znerd.xmlenc.XMLOutputter',

  // internal java api: sun.net.dns.ResolverConfiguration
  // internal java api: sun.net.util.IPAddressUtil
  'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

  // internal java api: sun.misc.Unsafe
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
  'org.apache.hadoop.io.nativeio.NativeIO',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

  // internal java api: sun.nio.ch.DirectBuffer
  // internal java api: sun.misc.Cleaner
  'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
  'org.apache.hadoop.crypto.CryptoStreamUtils',

  // internal java api: sun.misc.SignalHandler
  'org.apache.hadoop.util.SignalLogger$Handler',

  // optional dependencies of slf4j-api
  'org.slf4j.impl.StaticMDCBinder',
  'org.slf4j.impl.StaticMarkerBinder',

  'org.apache.log4j.AppenderSkeleton',
  'org.apache.log4j.AsyncAppender',
  'org.apache.log4j.helpers.ISO8601DateFormat',
  'org.apache.log4j.spi.ThrowableInformation'
]

// Gradle 2.13 bundles org.slf4j.impl.StaticLoggerBinder in its core.jar which leaks into the forbidden APIs ant task
// Gradle 2.14+ does not bundle this class anymore so we need to properly exclude it here.
if (GradleVersion.current() > GradleVersion.version("2.13")) {
  thirdPartyAudit.excludes += ['org.slf4j.impl.StaticLoggerBinder']
}