mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-26 23:07:45 +00:00
The test setup for hdfs is a little complicated for windows, needing to check if the hdfs fixture can be run at all. This was unfortunately not updated when the integ tests were reorganized into separate runner and cluster setups.
375 lines
16 KiB
Groovy
/*
|
|
* Licensed to Elasticsearch under one or more contributor
|
|
* license agreements. See the NOTICE file distributed with
|
|
* this work for additional information regarding copyright
|
|
* ownership. Elasticsearch licenses this file to you under
|
|
* the Apache License, Version 2.0 (the "License"); you may
|
|
* not use this file except in compliance with the License.
|
|
* You may obtain a copy of the License at
|
|
*
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
*
|
|
* Unless required by applicable law or agreed to in writing,
|
|
* software distributed under the License is distributed on an
|
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
* KIND, either express or implied. See the License for the
|
|
* specific language governing permissions and limitations
|
|
* under the License.
|
|
*/
|
|
|
|
import org.apache.tools.ant.taskdefs.condition.Os
|
|
import java.nio.file.Files
|
|
import java.nio.file.Path
|
|
import java.nio.file.Paths
|
|
|
|
// Plugin descriptor consumed by the elasticsearch build: the fully-qualified
// entry-point class plus a human-readable description for the plugin manifest.
esplugin {
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
}
|
|
|
|
// Pin the hadoop 2.x line; every org.apache.hadoop dependency below references
// this single version so upgrades happen in one place.
versions << [
  'hadoop2': '2.7.1'
]
|
|
|
|
// Dedicated configuration so the MiniHDFS test fixture resolves its own
// classpath, independent of the plugin's compile classpath.
configurations {
  hdfsFixture
}
|
|
|
|
// Compile-time hadoop client stack plus its manually enumerated transitive
// dependencies (added one by one until the plugin worked — see the
// thirdPartyAudit excludes below for classes deliberately left out).
dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
  compile 'com.google.guava:guava:16.0.1'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile 'commons-cli:commons-cli:1.2'
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"

  // the fixture runs in its own JVM with its own classpath (see hdfsFixture task)
  hdfsFixture project(':test:fixtures:hdfs-fixture')
}
|
|
|
|
dependencyLicenses {
  // all hadoop-* artifacts come from the same project and share one 'hadoop' license file
  mapping from: /hadoop-.*/, to: 'hadoop'
}
|
|
|
|
// Launches the MiniHDFS fixture (from :test:fixtures:hdfs-fixture) as a
// separate java process; baseDir is supplied by the Fixture base task.
task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
  dependsOn project.configurations.hdfsFixture
  executable = new File(project.javaHome, 'bin/java')
  // lazy GString ("${ -> ... }") so the configuration is resolved at task
  // execution time rather than during project configuration
  env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
  args 'hdfs.MiniHDFS',
       baseDir
}
|
|
|
|
// Determine whether the MiniHDFS fixture can run on this platform. On Windows
// the fixture needs the hadoop native libraries (winutils.exe, hadoop.dll,
// hdfs.dll) from HADOOP_HOME\bin; everywhere else it always works.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
        Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
        Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
        Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      // '\$' escapes GString interpolation: the previous unescaped
      // "$HADOOP_HOME" made Groovy look up a HADOOP_HOME property and throw
      // MissingPropertyException instead of producing this message.
      throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
    }
  }
} else {
  fixtureSupported = true
}
|
|
|
|
// Wire the fixture into integration testing: run the full REST suite against
// a live MiniHDFS when supported, otherwise restrict to the smoke test.
if (fixtureSupported) {
  integTest.dependsOn hdfsFixture
} else {
  logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  // just tests that the plugin loads
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
}
|
|
|
|
// Classes referenced by the bundled hadoop jars but intentionally not on the
// plugin classpath (or relying on internal JDK APIs). thirdPartyAudit would
// otherwise fail the build when it finds these dangling references.
thirdPartyAudit.excludes = [
  // classes are missing, because we added hadoop jars one by one until tests pass.
  'com.google.gson.stream.JsonReader',
  'com.google.gson.stream.JsonWriter',
  'com.jcraft.jsch.ChannelExec',
  'com.jcraft.jsch.JSch',
  'com.jcraft.jsch.Logger',
  'com.jcraft.jsch.Session',
  'com.sun.jersey.api.ParamException',
  'com.sun.jersey.api.core.HttpContext',
  'com.sun.jersey.core.spi.component.ComponentContext',
  'com.sun.jersey.core.spi.component.ComponentScope',
  'com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable',
  'com.sun.jersey.spi.container.ContainerRequest',
  'com.sun.jersey.spi.container.ContainerRequestFilter',
  'com.sun.jersey.spi.container.ContainerResponseFilter',
  'com.sun.jersey.spi.container.ResourceFilter',
  'com.sun.jersey.spi.container.servlet.ServletContainer',
  'com.sun.jersey.spi.inject.Injectable',
  'com.sun.jersey.spi.inject.InjectableProvider',
  'io.netty.bootstrap.Bootstrap',
  'io.netty.bootstrap.ChannelFactory',
  'io.netty.bootstrap.ServerBootstrap',
  'io.netty.buffer.ByteBuf',
  'io.netty.buffer.Unpooled',
  'io.netty.channel.Channel',
  'io.netty.channel.ChannelFuture',
  'io.netty.channel.ChannelFutureListener',
  'io.netty.channel.ChannelHandler',
  'io.netty.channel.ChannelHandlerContext',
  'io.netty.channel.ChannelInboundHandlerAdapter',
  'io.netty.channel.ChannelInitializer',
  'io.netty.channel.ChannelPipeline',
  'io.netty.channel.EventLoopGroup',
  'io.netty.channel.SimpleChannelInboundHandler',
  'io.netty.channel.group.ChannelGroup',
  'io.netty.channel.group.ChannelGroupFuture',
  'io.netty.channel.group.DefaultChannelGroup',
  'io.netty.channel.nio.NioEventLoopGroup',
  'io.netty.channel.socket.SocketChannel',
  'io.netty.channel.socket.nio.NioServerSocketChannel',
  'io.netty.channel.socket.nio.NioSocketChannel',
  'io.netty.handler.codec.http.DefaultFullHttpRequest',
  'io.netty.handler.codec.http.DefaultFullHttpResponse',
  'io.netty.handler.codec.http.DefaultHttpResponse',
  'io.netty.handler.codec.http.HttpContent',
  'io.netty.handler.codec.http.HttpHeaders',
  'io.netty.handler.codec.http.HttpMethod',
  'io.netty.handler.codec.http.HttpRequest',
  'io.netty.handler.codec.http.HttpRequestDecoder',
  'io.netty.handler.codec.http.HttpRequestEncoder',
  'io.netty.handler.codec.http.HttpResponseEncoder',
  'io.netty.handler.codec.http.HttpResponseStatus',
  'io.netty.handler.codec.http.HttpVersion',
  'io.netty.handler.codec.http.QueryStringDecoder',
  'io.netty.handler.codec.string.StringEncoder',
  'io.netty.handler.ssl.SslHandler',
  'io.netty.handler.stream.ChunkedStream',
  'io.netty.handler.stream.ChunkedWriteHandler',
  'io.netty.util.concurrent.GlobalEventExecutor',
  'javax.ws.rs.core.Context',
  'javax.ws.rs.core.MediaType',
  'javax.ws.rs.core.MultivaluedMap',
  'javax.ws.rs.core.Response$ResponseBuilder',
  'javax.ws.rs.core.Response$Status',
  'javax.ws.rs.core.Response',
  'javax.ws.rs.core.StreamingOutput',
  'javax.ws.rs.core.UriBuilder',
  'javax.ws.rs.ext.ExceptionMapper',
  'jdiff.JDiff',
  'org.apache.avalon.framework.logger.Logger',
  'org.apache.avro.Schema',
  'org.apache.avro.file.DataFileReader',
  'org.apache.avro.file.FileReader',
  'org.apache.avro.file.SeekableInput',
  'org.apache.avro.generic.GenericDatumReader',
  'org.apache.avro.generic.GenericDatumWriter',
  'org.apache.avro.io.BinaryDecoder',
  'org.apache.avro.io.BinaryEncoder',
  'org.apache.avro.io.DatumReader',
  'org.apache.avro.io.DatumWriter',
  'org.apache.avro.io.DecoderFactory',
  'org.apache.avro.io.EncoderFactory',
  'org.apache.avro.io.JsonEncoder',
  'org.apache.avro.reflect.ReflectData',
  'org.apache.avro.reflect.ReflectDatumReader',
  'org.apache.avro.reflect.ReflectDatumWriter',
  'org.apache.avro.specific.SpecificDatumReader',
  'org.apache.avro.specific.SpecificDatumWriter',
  'org.apache.avro.specific.SpecificRecord',
  'org.apache.commons.beanutils.BeanUtils',
  'org.apache.commons.beanutils.DynaBean',
  'org.apache.commons.beanutils.DynaClass',
  'org.apache.commons.beanutils.DynaProperty',
  'org.apache.commons.beanutils.PropertyUtils',
  'org.apache.commons.compress.archivers.tar.TarArchiveEntry',
  'org.apache.commons.compress.archivers.tar.TarArchiveInputStream',
  'org.apache.commons.codec.DecoderException',
  'org.apache.commons.codec.binary.Base64',
  'org.apache.commons.codec.binary.Hex',
  'org.apache.commons.codec.digest.DigestUtils',
  'org.apache.commons.daemon.Daemon',
  'org.apache.commons.daemon.DaemonContext',
  'org.apache.commons.digester.AbstractObjectCreationFactory',
  'org.apache.commons.digester.CallMethodRule',
  'org.apache.commons.digester.Digester',
  'org.apache.commons.digester.ObjectCreationFactory',
  'org.apache.commons.digester.substitution.MultiVariableExpander',
  'org.apache.commons.digester.substitution.VariableSubstitutor',
  'org.apache.commons.digester.xmlrules.DigesterLoader',
  'org.apache.commons.httpclient.util.URIUtil',
  'org.apache.commons.jxpath.JXPathContext',
  'org.apache.commons.jxpath.ri.JXPathContextReferenceImpl',
  'org.apache.commons.jxpath.ri.QName',
  'org.apache.commons.jxpath.ri.compiler.NodeNameTest',
  'org.apache.commons.jxpath.ri.compiler.NodeTest',
  'org.apache.commons.jxpath.ri.compiler.NodeTypeTest',
  'org.apache.commons.jxpath.ri.model.NodeIterator',
  'org.apache.commons.jxpath.ri.model.NodePointer',
  'org.apache.commons.jxpath.ri.model.NodePointerFactory',
  'org.apache.commons.math3.util.ArithmeticUtils',
  'org.apache.commons.net.ftp.FTPClient',
  'org.apache.commons.net.ftp.FTPFile',
  'org.apache.commons.net.ftp.FTPReply',
  'org.apache.commons.net.util.SubnetUtils$SubnetInfo',
  'org.apache.commons.net.util.SubnetUtils',
  'org.apache.curator.ensemble.fixed.FixedEnsembleProvider',
  'org.apache.curator.framework.CuratorFramework',
  'org.apache.curator.framework.CuratorFrameworkFactory$Builder',
  'org.apache.curator.framework.CuratorFrameworkFactory',
  'org.apache.curator.framework.api.ACLBackgroundPathAndBytesable',
  'org.apache.curator.framework.api.ACLProvider',
  'org.apache.curator.framework.api.BackgroundPathAndBytesable',
  'org.apache.curator.framework.api.ChildrenDeletable',
  'org.apache.curator.framework.api.CreateBuilder',
  'org.apache.curator.framework.api.DeleteBuilder',
  'org.apache.curator.framework.api.ExistsBuilder',
  'org.apache.curator.framework.api.GetChildrenBuilder',
  'org.apache.curator.framework.api.GetDataBuilder',
  'org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable',
  'org.apache.curator.framework.api.SetDataBuilder',
  'org.apache.curator.framework.api.WatchPathable',
  'org.apache.curator.framework.imps.DefaultACLProvider',
  'org.apache.curator.framework.listen.ListenerContainer',
  'org.apache.curator.framework.recipes.cache.ChildData',
  'org.apache.curator.framework.recipes.cache.PathChildrenCache$StartMode',
  'org.apache.curator.framework.recipes.cache.PathChildrenCache',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent$Type',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent',
  'org.apache.curator.framework.recipes.cache.PathChildrenCacheListener',
  'org.apache.curator.framework.recipes.locks.Reaper$Mode',
  'org.apache.curator.framework.recipes.locks.Reaper',
  'org.apache.curator.framework.recipes.shared.SharedCount',
  'org.apache.curator.framework.recipes.shared.VersionedValue',
  'org.apache.curator.retry.ExponentialBackoffRetry',
  'org.apache.curator.retry.RetryNTimes',
  'org.apache.curator.utils.CloseableScheduledExecutorService',
  'org.apache.curator.utils.CloseableUtils',
  'org.apache.curator.utils.EnsurePath',
  'org.apache.curator.utils.PathUtils',
  'org.apache.curator.utils.ThreadUtils',
  'org.apache.curator.utils.ZKPaths',
  'org.apache.directory.server.kerberos.shared.keytab.Keytab',
  'org.apache.directory.server.kerberos.shared.keytab.KeytabEntry',
  'org.apache.http.NameValuePair',
  'org.apache.http.client.utils.URIBuilder',
  'org.apache.http.client.utils.URLEncodedUtils',
  'org.apache.log.Hierarchy',
  'org.apache.log.Logger',
  'org.apache.tools.ant.BuildException',
  'org.apache.tools.ant.DirectoryScanner',
  'org.apache.tools.ant.Task',
  'org.apache.tools.ant.taskdefs.Execute',
  'org.apache.tools.ant.types.FileSet',
  'org.apache.xml.serialize.OutputFormat',
  'org.apache.xml.serialize.XMLSerializer',
  'org.apache.zookeeper.AsyncCallback$StatCallback',
  'org.apache.zookeeper.AsyncCallback$StringCallback',
  'org.apache.zookeeper.CreateMode',
  'org.apache.zookeeper.KeeperException$Code',
  'org.apache.zookeeper.KeeperException',
  'org.apache.zookeeper.WatchedEvent',
  'org.apache.zookeeper.Watcher$Event$EventType',
  'org.apache.zookeeper.Watcher$Event$KeeperState',
  'org.apache.zookeeper.Watcher',
  'org.apache.zookeeper.ZKUtil',
  'org.apache.zookeeper.ZooDefs$Ids',
  'org.apache.zookeeper.ZooKeeper',
  'org.apache.zookeeper.data.ACL',
  'org.apache.zookeeper.data.Id',
  'org.apache.zookeeper.data.Stat',
  'org.codehaus.jackson.JsonEncoding',
  'org.codehaus.jackson.JsonFactory',
  'org.codehaus.jackson.JsonGenerator',
  'org.codehaus.jackson.JsonGenerator$Feature',
  'org.codehaus.jackson.JsonNode',
  'org.codehaus.jackson.map.MappingJsonFactory',
  'org.codehaus.jackson.map.ObjectMapper',
  'org.codehaus.jackson.map.ObjectReader',
  'org.codehaus.jackson.map.ObjectWriter',
  'org.codehaus.jackson.node.ContainerNode',
  'org.codehaus.jackson.type.TypeReference',
  'org.codehaus.jackson.util.MinimalPrettyPrinter',
  'org.fusesource.leveldbjni.JniDBFactory',
  'org.iq80.leveldb.DB',
  'org.iq80.leveldb.Options',
  'org.iq80.leveldb.WriteBatch',
  'org.mortbay.jetty.Connector',
  'org.mortbay.jetty.Handler',
  'org.mortbay.jetty.InclusiveByteRange',
  'org.mortbay.jetty.MimeTypes',
  'org.mortbay.jetty.NCSARequestLog',
  'org.mortbay.jetty.RequestLog',
  'org.mortbay.jetty.Server',
  'org.mortbay.jetty.handler.ContextHandler$SContext',
  'org.mortbay.jetty.handler.ContextHandler',
  'org.mortbay.jetty.handler.ContextHandlerCollection',
  'org.mortbay.jetty.handler.HandlerCollection',
  'org.mortbay.jetty.handler.RequestLogHandler',
  'org.mortbay.jetty.nio.SelectChannelConnector',
  'org.mortbay.jetty.security.SslSocketConnector',
  'org.mortbay.jetty.servlet.AbstractSessionManager',
  'org.mortbay.jetty.servlet.Context',
  'org.mortbay.jetty.servlet.DefaultServlet',
  'org.mortbay.jetty.servlet.FilterHolder',
  'org.mortbay.jetty.servlet.FilterMapping',
  'org.mortbay.jetty.servlet.ServletHandler',
  'org.mortbay.jetty.servlet.ServletHolder',
  'org.mortbay.jetty.servlet.SessionHandler',
  'org.mortbay.jetty.webapp.WebAppContext',
  'org.mortbay.log.Log',
  'org.mortbay.thread.QueuedThreadPool',
  'org.mortbay.util.MultiException',
  'org.mortbay.util.ajax.JSON$Convertible',
  'org.mortbay.util.ajax.JSON$Output',
  'org.mortbay.util.ajax.JSON',
  'org.znerd.xmlenc.XMLOutputter',

  // internal java api: sun.net.dns.ResolverConfiguration
  // internal java api: sun.net.util.IPAddressUtil
  'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

  // internal java api: sun.misc.Unsafe
  'com.google.common.cache.Striped64',
  'com.google.common.cache.Striped64$1',
  'com.google.common.cache.Striped64$Cell',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
  'org.apache.hadoop.io.nativeio.NativeIO',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

  // internal java api: sun.nio.ch.DirectBuffer
  // internal java api: sun.misc.Cleaner
  'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
  'org.apache.hadoop.crypto.CryptoStreamUtils',

  // internal java api: sun.misc.SignalHandler
  'org.apache.hadoop.util.SignalLogger$Handler',

  // optional dependencies of slf4j-api
  'org.slf4j.impl.StaticMDCBinder',
  'org.slf4j.impl.StaticMarkerBinder',

  'org.apache.log4j.AppenderSkeleton',
  'org.apache.log4j.AsyncAppender',
  'org.apache.log4j.helpers.ISO8601DateFormat',
  'org.apache.log4j.spi.ThrowableInformation'
]
|
|
|
|
// Gradle 2.13 bundles org.slf4j.impl.StaticLoggerBinder in its core.jar which leaks into the forbidden APIs ant task
// Gradle 2.14+ does not bundle this class anymore so we need to properly exclude it here.
if (GradleVersion.version('2.13') < GradleVersion.current()) {
  thirdPartyAudit.excludes += ['org.slf4j.impl.StaticLoggerBinder']
}
|