/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.test.ClusterConfiguration
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

apply plugin: 'elasticsearch.vagrantsupport'

versions << [
  'hadoop2': '2.8.1'
]

configurations {
  hdfsFixture
}

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  compile 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile 'commons-cli:commons-cli:1.2'
  compile 'commons-codec:commons-codec:1.10'
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')
}

dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

// MIT Kerberos Vagrant Testing Fixture
String box = "krb5kdc"
Map vagrantEnvVars = [
  'VAGRANT_CWD'         : "${project(':test:fixtures:krb5kdc-fixture').projectDir}",
  'VAGRANT_VAGRANTFILE' : 'Vagrantfile',
  'VAGRANT_PROJECT_DIR' : "${project(':test:fixtures:krb5kdc-fixture').projectDir}"
]

task krb5kdcUpdate(type: org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
  command 'box'
  subcommand 'update'
  boxName box
  environmentVars vagrantEnvVars
  dependsOn "vagrantCheckVersion", "virtualboxCheckVersion"
}

task krb5kdcFixture(type: org.elasticsearch.gradle.test.VagrantFixture) {
  command 'up'
  args '--provision', '--provider', 'virtualbox'
  boxName box
  environmentVars vagrantEnvVars
  dependsOn krb5kdcUpdate
}

task krb5AddPrincipals { dependsOn krb5kdcFixture }

List principals = [ "elasticsearch", "hdfs/hdfs.build.elastic.co" ]
String realm = "BUILD.ELASTIC.CO"

for (String principal : principals) {
  Task create = project.tasks.create("addPrincipal#${principal}".replace('/', '_'), org.elasticsearch.gradle.vagrant.VagrantCommandTask) {
    command 'ssh'
    args '--command', "sudo bash /vagrant/src/main/resources/provision/addprinc.sh $principal"
    boxName box
    environmentVars vagrantEnvVars
    dependsOn krb5kdcFixture
  }
  krb5AddPrincipals.dependsOn(create)
}

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture
    executable = new File(project.runtimeJavaHome, 'bin/java')
    env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }

    final List miniHDFSArgs = []

    // If it's a secure fixture, then depend on the Kerberos fixture and principals, and add the krb5 conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      dependsOn krb5kdcFixture, krb5AddPrincipals
      Path krb5Config = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf")
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5Config}")
      if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
        miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
      }
    }

    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      Path keytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab")
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add("${keytabPath}")
    }

    args miniHDFSArgs.toArray()
  }
}

// The following closure must execute before the afterEvaluate block in the constructor of the following integrationTest tasks:
project.afterEvaluate {
  for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
    ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration
    cluster.dependsOn(project.bundlePlugin)

    Task restIntegTestTask = project.tasks.getByName(integTestTaskName)
    restIntegTestTask.clusterConfig.plugin(project.path)

    // Default jvm arguments for all test clusters
    String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
      " " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
      " " + System.getProperty('tests.jvm.argline', '')

    // If it's a secure cluster, add the keytab as an extra config, and set the krb5 conf in the JVM options.
    if (integTestTaskName.equals('integTestSecure') || integTestTaskName.equals('integTestSecureHa')) {
      Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath()
      Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()

      restIntegTestTask.clusterConfig.extraConfigFile("repository-hdfs/krb5.keytab", "${elasticsearchKT}")
      jvmArgs = jvmArgs + " " + "-Djava.security.krb5.conf=${krb5conf}"
      if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
        jvmArgs = jvmArgs + " " + '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
      }

      // If it's the HA + Secure tests, then also set the Kerberos settings for the integration test JVM since we'll
      // need to auth to HDFS to trigger namenode failovers.
      if (integTestTaskName.equals('integTestSecureHa')) {
        Task restIntegTestTaskRunner = project.tasks.getByName("${integTestTaskName}Runner")
        restIntegTestTaskRunner.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
        restIntegTestTaskRunner.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
        restIntegTestTaskRunner.jvmArg "-Djava.security.krb5.conf=${krb5conf}"
        if (project.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
          restIntegTestTaskRunner.jvmArg '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
        }
        Path hdfsKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab").toAbsolutePath()
        restIntegTestTaskRunner.systemProperty "test.krb5.keytab.hdfs", "${hdfsKT}"
      }
    }

    restIntegTestTask.clusterConfig.jvmArgs = jvmArgs
  }
}

// Create an Integration Test suite just for HA based tests
RestIntegTestTask integTestHa = project.tasks.create('integTestHa', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode."
}

// Create an Integration Test suite just for security based tests
RestIntegTestTask integTestSecure = project.tasks.create('integTestSecure', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS secured by MIT Kerberos."
}

// Create an Integration Test suite just for HA related security based tests
RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos."
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
        Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
        Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
        Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}

boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}

// Always ignore HA integration tests in the normal integration test runner; they are included below as
// part of their own HA-specific integration test tasks.
integTestRunner.exclude('**/Ha*TestSuiteIT.class')

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTestCluster.dependsOn hdfsFixture
  integTestHaCluster.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository'

  // Only include the HA integration tests for the HA test task
  integTestHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  // The HA fixture is unsupported. Don't run the HA tests.
  integTestHa.setEnabled(false)
}

// Secure HDFS testing relies on the Vagrant based Kerberos fixture.
boolean secureFixtureSupported = false
if (fixtureSupported) {
  secureFixtureSupported = project.rootProject.vagrantSupported
}

if (secureFixtureSupported) {
  project.check.dependsOn(integTestSecure)
  project.check.dependsOn(integTestSecureHa)

  // Fixture dependencies
  integTestSecureCluster.dependsOn secureHdfsFixture, krb5kdcFixture
  integTestSecureHaCluster.dependsOn secureHaHdfsFixture, krb5kdcFixture

  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  Path hdfsKeytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs")
  project.dependencies {
    testRuntime fileTree(dir: hdfsKeytabPath.toString(), include: ['*.keytab'])
  }

  // Run just the secure hdfs rest test suite.
  integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
  // Ignore HA integration tests. They are included below as part of the integTestSecureHa test runner.
  integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class')
  // Only include the HA integration tests for the HA test task
  integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
} else {
  // Security tests unsupported. Don't run these tests.
  integTestSecure.enabled = false
  integTestSecureHa.enabled = false
  testingConventions.enabled = false
}

thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations (
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler'
  )
}