// OpenSearch/plugins/repository-hdfs/build.gradle

/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//apply plugin: 'nebula.provided-base'
import org.apache.tools.ant.taskdefs.condition.Os
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}
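
// Once installed, the plugin registers the "hdfs" snapshot repository type.
// Illustrative registration request (repository name, host, and paths below
// are hypothetical; the exact settings depend on your HDFS deployment):
//
//   PUT _snapshot/my_hdfs_repository
//   {
//     "type": "hdfs",
//     "settings": {
//       "uri": "hdfs://namenode:8020/",
//       "path": "elasticsearch/repositories/my_hdfs_repository"
//     }
//   }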

versions << [
  'hadoop2': '2.7.1'
]
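
// Separate classpath for the MiniHDFS test fixture, so its dependencies are
// resolved independently of the plugin's own compile-time dependencies.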
configurations {
  hdfsFixture
}

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
compile 'com.google.guava:guava:16.0.1'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
compile "org.slf4j:slf4j-api:${versions.slf4j}"
hdfsFixture project(':test:fixtures:hdfs-fixture')
}
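
// All hadoop-* jars ship under the same license, so map them to a single
// 'hadoop' entry rather than keeping one license file per artifact.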
dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}
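
// Starts the MiniHDFS fixture (an in-process, single-node HDFS used as the
// snapshot target) before the integration tests. In effect this runs:
//
//   java -cp <hdfsFixture classpath> hdfs.MiniHDFS <baseDir>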
task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
  dependsOn project.configurations.hdfsFixture
  executable = new File(project.javaHome, 'bin/java')
  args '-cp', "${ -> project.configurations.hdfsFixture.asPath }",
      'hdfs.MiniHDFS',
      baseDir
}
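
// MiniHDFS can only start on Windows when Hadoop's native binaries
// (winutils.exe, hadoop.dll, hdfs.dll) are present under %HADOOP_HOME%\bin;
// on other platforms it is expected to work without native libraries.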
integTest {
  boolean fixtureSupported = false;
  if (Os.isFamily(Os.FAMILY_WINDOWS)) {
    // hdfs fixture will not start without hadoop native libraries on windows
    String nativePath = System.getenv("HADOOP_HOME")
    if (nativePath != null) {
      Path path = Paths.get(nativePath);
      if (Files.isDirectory(path) &&
          Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
          Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
          Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
        fixtureSupported = true
      } else {
        // note: \$ keeps Groovy from interpolating a (nonexistent) HADOOP_HOME property
        throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
      }
    }
  } else {
    fixtureSupported = true
  }

  if (fixtureSupported) {
    dependsOn hdfsFixture
  } else {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
    // just tests that the plugin loads
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
}
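
// The Hadoop 2.x APIs this plugin compiles against trigger deprecation and
// raw-type warnings; suppress those two lint checks.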
compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes'
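
// missingClasses is expected here: hadoop declares optional dependencies that
// are deliberately not bundled with the plugin. The excludes below reference
// JDK-internal APIs (com.sun.*, sun.*), as noted per entry, and would
// otherwise fail the third-party audit.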
thirdPartyAudit.missingClasses = true
thirdPartyAudit.excludes = [
  // note: the jersey ones may be bogus, see my bug report at forbidden-apis!
  // internal java api: com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable
  // internal java api: com.sun.jersey.api.core.HttpContext
  // internal java api: com.sun.jersey.core.spi.component.ComponentScope
  // internal java api: com.sun.jersey.spi.inject.Injectable
  // internal java api: com.sun.jersey.core.spi.component.ComponentContext
  'org.apache.hadoop.hdfs.web.resources.UserProvider',

  // internal java api: com.sun.jersey.spi.container.ResourceFilters
  'org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods',

  // internal java api: com.sun.jersey.spi.container.servlet.ServletContainer
  'org.apache.hadoop.http.HttpServer',
  'org.apache.hadoop.http.HttpServer2',

  // internal java api: com.sun.jersey.api.ParamException
  'org.apache.hadoop.hdfs.web.resources.ExceptionHandler',
  'org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ExceptionHandler',
  'org.apache.hadoop.hdfs.web.ParamFilter',

  // internal java api: com.sun.jersey.spi.container.ContainerRequestFilter
  // internal java api: com.sun.jersey.spi.container.ContainerRequest
  'org.apache.hadoop.hdfs.web.ParamFilter',
  'org.apache.hadoop.hdfs.web.ParamFilter$1',

  // internal java api: com.sun.jndi.ldap.LdapCtxFactory
  'org.apache.hadoop.security.LdapGroupsMapping',

  // internal java api: sun.net.dns.ResolverConfiguration
  // internal java api: sun.net.util.IPAddressUtil
  'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

  // internal java api: sun.misc.Unsafe
  'com.google.common.cache.Striped64',
  'com.google.common.cache.Striped64$1',
  'com.google.common.cache.Striped64$Cell',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
  'org.apache.hadoop.io.nativeio.NativeIO',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

  // internal java api: sun.nio.ch.DirectBuffer
  // internal java api: sun.misc.Cleaner
  'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
  'org.apache.hadoop.crypto.CryptoStreamUtils',

  // internal java api: sun.misc.SignalHandler
  'org.apache.hadoop.util.SignalLogger$Handler',
]