
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
//apply plugin: 'nebula.provided-base'
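
// Plugin descriptor metadata: the description published with the plugin and
// the entry-point class that Elasticsearch loads.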
esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin'
}
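
// Append the Hadoop 2 release to the build-wide 'versions' map so the
// dependency coordinates below can interpolate ${versions.hadoop2}.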
versions << [
  'hadoop2': '2.7.1'
]
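
// The Hadoop client stack plus the supporting libraries it needs at runtime,
// all bundled with the plugin.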
dependencies {
compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
compile 'org.apache.htrace:htrace-core:3.1.0-incubating'
compile 'com.google.guava:guava:16.0.1'
compile 'com.google.protobuf:protobuf-java:2.5.0'
compile 'commons-logging:commons-logging:1.1.3'
compile 'commons-collections:commons-collections:3.2.2'
compile 'commons-configuration:commons-configuration:1.6'
compile 'commons-io:commons-io:2.4'
compile 'commons-lang:commons-lang:2.6'
compile 'javax.servlet:servlet-api:2.5'
  // we need this one, its not really 'provided'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
}
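
// All hadoop-* artifacts above ship under the same license, so map them to a
// single 'hadoop' entry for the dependency license check.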
dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}
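
// Silence deprecation and raw-type lint warnings when compiling the plugin's Java sources.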
compileJava.options.compilerArgs << '-Xlint:-deprecation,-rawtypes'
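
// The bundled Hadoop and Guava classes listed below reference JDK-internal or
// Jersey-internal APIs; exclude them so the third-party audit does not fail.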
thirdPartyAudit.missingClasses = true
thirdPartyAudit.excludes = [
  // note: the jersey ones may be bogus, see my bug report at forbidden-apis!
  // internal java api: com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable
  // internal java api: com.sun.jersey.api.core.HttpContext
  // internal java api: com.sun.jersey.core.spi.component.ComponentScope
  // internal java api: com.sun.jersey.spi.inject.Injectable
  // internal java api: com.sun.jersey.core.spi.component.ComponentContext
  'org.apache.hadoop.hdfs.web.resources.UserProvider',

  // internal java api: com.sun.jersey.spi.container.ResourceFilters
  'org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods',

  // internal java api: com.sun.jersey.spi.container.servlet.ServletContainer
  'org.apache.hadoop.http.HttpServer',
  'org.apache.hadoop.http.HttpServer2',

  // internal java api: com.sun.jersey.api.ParamException
  'org.apache.hadoop.hdfs.web.resources.ExceptionHandler',
  'org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ExceptionHandler',
  'org.apache.hadoop.hdfs.web.ParamFilter',

  // internal java api: com.sun.jersey.spi.container.ContainerRequestFilter
  // internal java api: com.sun.jersey.spi.container.ContainerRequest
  'org.apache.hadoop.hdfs.web.ParamFilter',
  'org.apache.hadoop.hdfs.web.ParamFilter$1',

  // internal java api: com.sun.jndi.ldap.LdapCtxFactory
  'org.apache.hadoop.security.LdapGroupsMapping',

  // internal java api: sun.net.dns.ResolverConfiguration
  // internal java api: sun.net.util.IPAddressUtil
  'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

  // internal java api: sun.misc.Unsafe
  'com.google.common.cache.Striped64',
  'com.google.common.cache.Striped64$1',
  'com.google.common.cache.Striped64$Cell',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
  'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
  'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
  'org.apache.hadoop.io.nativeio.NativeIO',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
  'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

  // internal java api: sun.nio.ch.DirectBuffer
  // internal java api: sun.misc.Cleaner
  'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
  'org.apache.hadoop.crypto.CryptoStreamUtils',

  // internal java api: sun.misc.SignalHandler
  'org.apache.hadoop.util.SignalLogger$Handler',
]