/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}
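
// For context, this plugin is exercised by registering a snapshot repository of type
// "hdfs". A minimal sketch (illustrative values; the exact settings are documented
// with the plugin):
//
//   PUT _snapshot/my_hdfs_repository
//   { "type": "hdfs", "settings": { "uri": "hdfs://namenode:8020/", "path": "elasticsearch/repositories" } }

// Single source of truth for the Hadoop 2 client version shared by the dependencies below.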
versions << [
  'hadoop2': '2.8.1'
]
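
// The krb5kdc fixture supplies the KDC, principals, and keytabs consumed by the secure test variants below.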
testFixtures.useFixture ":test:fixtures:krb5kdc-fixture"
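
// Dedicated configuration holding the MiniHDFS fixture classpath, kept separate from the plugin's own classpath.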
configurations {
  hdfsFixture
}
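
// The Hadoop client stack and its transitive commons dependencies are bundled with the plugin.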
dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  compile 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  compile 'commons-cli:commons-cli:1.2'
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')
  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
}

normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
  }
}
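
// All of the hadoop-* artifacts above are covered by the single 'hadoop' license entry.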
dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}
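
// Kerberos realm and krb5.conf produced by the kdc fixture; shared by the secure fixtures, runners, and test clusters below.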
String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
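
// Each fixture below backs one integration test task: hdfsFixture -> integTest, haHdfsFixture -> integTestHa,
// secureHdfsFixture -> integTestSecure, and secureHaHdfsFixture -> integTestSecureHa.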
// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = new File(project.runtimeJavaHome, 'bin/java')
    env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
    maxWaitInSeconds 60
    onlyIf { project(':test:fixtures:krb5kdc-fixture').buildFixture.enabled && project.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }
    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}")
      rootProject.globalInfo.ready {
        if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
          miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
        }
      }
    }
    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }
}
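
// Task names added to this set are still created below, but are immediately disabled. It is empty by default.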
Set disabledIntegTestTaskNames = []
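
// Each of these tasks can also be run on its own, e.g. (illustrative invocation; the
// project path depends on the checkout layout):
//   ./gradlew :plugins:repository-hdfs:integTestSecureHa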
for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  task "${integTestTaskName}"(type: RestIntegTestTask) {
    description = "Runs rest tests against an elasticsearch cluster with HDFS."
    dependsOn(project.bundlePlugin)

    if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
      enabled = false
    }

    if (integTestTaskName.contains("Secure")) {
      if (integTestTaskName.contains("Ha")) {
        dependsOn secureHaHdfsFixture
      } else {
        dependsOn secureHdfsFixture
      }
    }

    runner {
      onlyIf { project.inFipsJvm == false }
      if (integTestTaskName.contains("Ha")) {
        if (integTestTaskName.contains("Secure")) {
          Path path = buildDir.toPath()
            .resolve("fixtures")
            .resolve("secureHaHdfsFixture")
            .resolve("ports")
          nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
          classpath += files(path)
        } else {
          Path path = buildDir.toPath()
            .resolve("fixtures")
            .resolve("haHdfsFixture")
            .resolve("ports")
          nonInputProperties.systemProperty "test.hdfs-fixture.ports", path
          classpath += files(path)
        }
      }

      if (integTestTaskName.contains("Secure")) {
        if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
          nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
          nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
          jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
          nonInputProperties.systemProperty(
            "test.krb5.keytab.hdfs",
            project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
          )
        }
      }
    }
  }

  testClusters."${integTestTaskName}" {
    plugin(file(bundlePlugin.archiveFile))
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
      )
    }
  }
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
        Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
        Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
        Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}
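
// MiniHDFS will not start from a checkout whose path contains spaces, so treat such paths as unsupported.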
boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}

// Always ignore HA integration tests in the normal integration test runner; they are
// included below as part of their own HA-specific integration test tasks.
integTest.runner {
  onlyIf { project.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTest.dependsOn hdfsFixture
  integTestHa.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }

  // Only include the HA integration tests for the HA test task
  integTestHa.runner {
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // HA fixture is unsupported. Don't run the HA tests.
  integTestHa.setEnabled(false)
}
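
// The secure variants are always wired into check; in FIPS JVMs their fixtures and
// runners are skipped via the onlyIf conditions above.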
check.dependsOn(integTestSecure, integTestSecureHa)

// Run just the secure hdfs rest test suite, and ignore the HA integration tests here;
// they are included below as part of the integTestSecureHa runner.
integTestSecure.runner {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa.runner {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}
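
// Hadoop (and its bundled Guava) reaches into JDK-internal APIs; the violations below
// are grouped by the internal API they rely on.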
thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations (
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler'
  )
}