// OpenSearch/plugins/repository-hdfs/build.gradle

/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.tools.ant.taskdefs.condition.Os
import org.opensearch.gradle.info.BuildParams
import org.opensearch.gradle.test.RestIntegTestTask
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import static org.opensearch.gradle.PropertyNormalization.IGNORE_VALUE
apply plugin: 'opensearch.test.fixtures'
apply plugin: 'opensearch.rest-resources'
apply plugin: 'opensearch.rest-test'
opensearchplugin {
description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
classname 'org.opensearch.repositories.hdfs.HdfsPlugin'
}
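// Hadoop version line used below for the hadoop-client-api, hadoop-client-runtime and hadoop-hdfs dependencies.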
versions << [
'hadoop3': '3.3.1'
]
testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"
configurations {
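// Classpath for the standalone MiniHDFS test fixture; resolved below into the CLASSPATH of the fixture JVM.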
hdfsFixture
}
dependencies {
api "org.apache.hadoop:hadoop-client-api:${versions.hadoop3}"
runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop3}"
api "org.apache.hadoop:hadoop-hdfs:${versions.hadoop3}"
api 'org.apache.htrace:htrace-core4:4.2.0-incubating'
api "org.apache.logging.log4j:log4j-core:${versions.log4j}"
api 'org.apache.avro:avro:1.10.2'
api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}"
api 'com.google.code.gson:gson:2.9.0'
runtimeOnly 'com.google.guava:guava:30.1.1-jre'
api 'com.google.protobuf:protobuf-java:3.20.0'
api "commons-logging:commons-logging:${versions.commonslogging}"
api 'commons-cli:commons-cli:1.2'
api "commons-codec:commons-codec:${versions.commonscodec}"
api 'commons-collections:commons-collections:3.2.2'
api 'org.apache.commons:commons-compress:1.21'
api 'org.apache.commons:commons-configuration2:2.7'
api 'commons-io:commons-io:2.11.0'
api 'org.apache.commons:commons-lang3:3.12.0'
implementation 'com.google.re2j:re2j:1.1'
api 'javax.servlet:servlet-api:2.5'
api "org.slf4j:slf4j-api:${versions.slf4j}"
api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"
api 'net.minidev:json-smart:2.4.8'
api 'org.apache.zookeeper:zookeeper:3.7.0'
api "io.netty:netty-all:${versions.netty}"
implementation 'com.fasterxml.woodstox:woodstox-core:6.2.8'
implementation 'org.codehaus.woodstox:stax2-api:4.2.1'
hdfsFixture project(':test:fixtures:hdfs-fixture')
// Put the keytab files on the test classpath so that we can access them from test code without the security manager
// freaking out.
if (isEclipse == false) {
testRuntimeOnly files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.opensearch.org.keytab").parent)
}
}
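// Make the core REST API specs for these APIs available to the plugin's YAML REST tests.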
restResources {
restApi {
includeCore '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
}
}
normalization {
runtimeClasspath {
// ignore generated keytab files for the purposes of build avoidance
ignore '*.keytab'
// ignore fixture ports file which is on the classpath primarily to pacify the security manager
ignore 'ports'
}
}
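// Group all hadoop-* artifacts under a single 'hadoop' entry for the dependency license check.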
tasks.named("dependencyLicenses").configure {
mapping from: /hadoop-.*/, to: 'hadoop'
}
thirdPartyAudit {
ignoreViolations(
// uses internal java api: sun.misc.Unsafe
'com.google.protobuf.MessageSchema',
'com.google.protobuf.UnsafeUtil',
'com.google.protobuf.UnsafeUtil$1',
'com.google.protobuf.UnsafeUtil$Android32MemoryAccessor',
'com.google.protobuf.UnsafeUtil$Android64MemoryAccessor',
'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
'com.google.protobuf.UnsafeUtil$MemoryAccessor'
)
}
tasks.named("integTest").configure {
it.dependsOn(project.tasks.named("bundlePlugin"))
}
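// Install the freshly built plugin distribution into the integTest cluster.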
testClusters.integTest {
plugin(project.tasks.bundlePlugin.archiveFile)
}
String realm = "BUILD.OPENSEARCH.ORG"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
project(':test:fixtures:krb5kdc-fixture').tasks.preProcessFixture {
dependsOn tasks.named("jarHell")
dependsOn tasks.named("testingConventions")
}
// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
def tsk = project.tasks.register(fixtureName, org.opensearch.gradle.test.AntFixture) {
dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
executable = "${BuildParams.runtimeJavaHome}/bin/java"
env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
maxWaitInSeconds 60
onlyIf { BuildParams.inFipsJvm == false }
waitCondition = { fixture, ant ->
// the hdfs.MiniHDFS fixture writes the ports file when
// it's ready, so we can just wait for the file to exist
return fixture.portsFile.exists()
}
final List<String> miniHDFSArgs = []
// If it's a secure fixture, depend on the Kerberos fixture and its principals, and add the krb5.conf to the JVM options
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}");
if (BuildParams.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
}
}
// If it's an HA fixture, set a nameservice to use in the JVM options
if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
}
// Common options
miniHDFSArgs.add('hdfs.MiniHDFS')
miniHDFSArgs.add(baseDir)
// If it's a secure fixture, then set the principal name and keytab locations to use for auth.
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("hdfs/hdfs.build.opensearch.org@${realm}")
miniHDFSArgs.add(
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.opensearch.org.keytab")
)
}
args miniHDFSArgs.toArray()
}
// TODO: The task configuration block has side effects that currently require it to always run;
// otherwise tests start failing. Therefore we force task creation for now.
tsk.get()
}
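// Illustration only (not part of the build): for 'secureHaHdfsFixture' the fixture process is launched roughly as
//   <runtimeJavaHome>/bin/java -Djava.security.krb5.conf=<krb5.conf> -Dha-nameservice=ha-hdfs \
//     hdfs.MiniHDFS <baseDir> hdfs/hdfs.build.opensearch.org@BUILD.OPENSEARCH.ORG <keytab>
// with the hdfsFixture configuration supplied through the CLASSPATH environment variable.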
Set disabledIntegTestTaskNames = []
for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
task "${integTestTaskName}"(type: RestIntegTestTask) {
description = "Runs rest tests against an opensearch cluster with HDFS."
dependsOn(project.bundlePlugin)
if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
enabled = false;
}
if (integTestTaskName.contains("Secure")) {
if (integTestTaskName.contains("Ha")) {
dependsOn secureHaHdfsFixture
} else {
dependsOn secureHdfsFixture
}
}
onlyIf { BuildParams.inFipsJvm == false }
if (integTestTaskName.contains("Ha")) {
Path portsFile
File portsFileDir = file("${workingDir}/hdfsFixture")
if (integTestTaskName.contains("Secure")) {
portsFile = buildDir.toPath()
.resolve("fixtures")
.resolve("secureHaHdfsFixture")
.resolve("ports")
} else {
portsFile = buildDir.toPath()
.resolve("fixtures")
.resolve("haHdfsFixture")
.resolve("ports")
}
nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
classpath += files(portsFileDir)
// Copy the ports file to a separate location that is placed on the test classpath
doFirst {
mkdir(portsFileDir)
copy {
from portsFile
into portsFileDir
}
}
}
if (integTestTaskName.contains("Secure")) {
if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
nonInputProperties.systemProperty "test.krb5.principal.es", "opensearch@${realm}"
nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.opensearch.org@${realm}"
jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
nonInputProperties.systemProperty(
"test.krb5.keytab.hdfs",
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.opensearch.org.keytab")
)
}
}
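// The Kerberos test code reflectively accesses sun.security.krb5 internals, which JDK 9+ only permits once the
// package has been opened to the unnamed module.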
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_1_8) {
jvmArgs += ["--add-opens", "java.security.jgss/sun.security.krb5=ALL-UNNAMED"]
}
}
testClusters."${integTestTaskName}" {
plugin(bundlePlugin.archiveFile)
if (integTestTaskName.contains("Secure")) {
systemProperty "java.security.krb5.conf", krb5conf
extraConfigFile(
"repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "opensearch.keytab")}"), IGNORE_VALUE
)
}
}
}
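// Illustration only (not part of the build): the secure REST suites are expected to register an HDFS repository
// roughly like the following, using the repository-hdfs 'uri', 'path' and 'security.principal' settings and the
// keytab installed above as repository-hdfs/krb5.keytab:
//   PUT _snapshot/hdfs_repo
//   { "type": "hdfs", "settings": { "uri": "hdfs://localhost:<port>/", "path": "/user/opensearch/test",
//     "security.principal": "opensearch@BUILD.OPENSEARCH.ORG" } }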
// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// The HDFS fixture will not start on Windows without the Hadoop native libraries
String nativePath = System.getenv("HADOOP_HOME")
if (nativePath != null) {
Path path = Paths.get(nativePath);
if (Files.isDirectory(path) &&
Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
fixtureSupported = true
} else {
throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");
}
}
} else {
fixtureSupported = true
}
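// The fixture is also known to fail when the checkout path contains spaces, hence the additional check below.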
boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
fixtureSupported = false
}
// Always exclude the HA integration tests from the normal integration test runner; they are run below
// as part of their own HA-specific integration test tasks.
integTest {
onlyIf { BuildParams.inFipsJvm == false }
exclude('**/Ha*TestSuiteIT.class')
}
if (fixtureSupported) {
// Check depends on the HA test. Already depends on the standard test.
project.check.dependsOn(integTestHa)
// Both standard and HA tests depend on their respective HDFS fixtures
integTest.dependsOn hdfsFixture
integTestHa.dependsOn haHdfsFixture
// The normal test runner only runs the standard hdfs rest tests
integTest {
systemProperty 'tests.rest.suite', 'hdfs_repository'
}
// Only include the HA integration tests for the HA test task
integTestHa {
setIncludes(['**/Ha*TestSuiteIT.class'])
}
} else {
if (legalPath) {
logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
} else {
logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
}
// The normal integration test runner will just test that the plugin loads
integTest {
systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
}
// The HA fixture is unsupported, so don't run the HA tests.
integTestHa.setEnabled(false)
}
check.dependsOn(integTestSecure, integTestSecureHa)
// Run just the secure hdfs rest test suite.
integTestSecure {
systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Exclude the HA integration tests; they are run below as part of the integTestSecureHa test runner.
integTestSecure {
exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa {
setIncludes(['**/Ha*TestSuiteIT.class'])
}
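// This second thirdPartyAudit block configures the same audit task as the one above; ignoreMissingClasses()
// (with no arguments) relaxes the audit so that classes missing from the classpath are not reported, and the
// violations below extend the earlier ignore list.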
thirdPartyAudit {
ignoreMissingClasses()
ignoreViolations(
// internal java api: sun.misc.Unsafe
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
'org.apache.hadoop.io.nativeio.NativeIO',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
// internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler',
'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper',
'org.apache.hadoop.hdfs.server.datanode.checker.AbstractFuture$UnsafeAtomicHelper$1',
'org.apache.hadoop.service.launcher.InterruptEscalator',
'org.apache.hadoop.service.launcher.IrqHandler',
'com.google.common.cache.Striped64',
'com.google.common.cache.Striped64$1',
'com.google.common.cache.Striped64$Cell',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'com.google.common.hash.Striped64',
'com.google.common.hash.Striped64$1',
'com.google.common.hash.Striped64$Cell',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64',
'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$1',
'org.apache.hadoop.thirdparty.com.google.common.cache.Striped64$Cell',
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64',
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1',
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell',
'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'org.apache.hadoop.thirdparty.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'org.apache.hadoop.thirdparty.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil',
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1',
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor',
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor',
'org.apache.hadoop.shaded.com.google.common.cache.Striped64',
'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1',
'org.apache.hadoop.shaded.com.google.common.cache.Striped64$Cell',
'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'org.apache.hadoop.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'org.apache.hadoop.shaded.com.google.common.hash.Striped64',
'org.apache.hadoop.shaded.com.google.common.hash.Striped64$1',
'org.apache.hadoop.shaded.com.google.common.hash.Striped64$Cell',
'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'org.apache.hadoop.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'org.apache.hadoop.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField',
'org.apache.hadoop.shaded.org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$1',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.cache.Striped64$Cell',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$1',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.hash.Striped64$Cell',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper',
'org.apache.hadoop.shaded.org.apache.curator.shaded.com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1',
'org.apache.hadoop.shaded.org.xbill.DNS.spi.DNSJavaNameServiceDescriptor',
'org.apache.hadoop.shaded.org.xerial.snappy.pure.PureJavaSnappy',
'org.apache.hadoop.shaded.org.xerial.snappy.pure.SnappyRawCompressor',
'org.apache.hadoop.shaded.org.xerial.snappy.pure.SnappyRawDecompressor',
'org.apache.hadoop.shaded.org.xerial.snappy.pure.UnsafeUtil',
'org.apache.avro.reflect.FieldAccessUnsafe',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeBooleanField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeByteField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCachedField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCharField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeCustomEncodedField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeDoubleField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeFloatField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeIntField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeLongField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeObjectField',
'org.apache.avro.reflect.FieldAccessUnsafe$UnsafeShortField',
)
}