/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.info.BuildParams
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

apply plugin: 'elasticsearch.test.fixtures'

esplugin {
  description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
  classname 'org.elasticsearch.repositories.hdfs.HdfsPlugin'
}

versions << [
  'hadoop2': '2.8.5'
]

testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"

configurations {
  hdfsFixture
}
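
// Note: the 'hdfsFixture' configuration declared above holds the classpath for the
// standalone MiniHDFS process started by the fixture tasks below; it is resolved
// lazily (via the ${->} closure used for the CLASSPATH env var) only when a fixture
// task actually runs.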

dependencies {
  compile "org.apache.hadoop:hadoop-client:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-common:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-annotations:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-auth:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs:${versions.hadoop2}"
  compile "org.apache.hadoop:hadoop-hdfs-client:${versions.hadoop2}"
  compile 'org.apache.htrace:htrace-core4:4.0.1-incubating'
  runtimeOnly 'com.google.guava:guava:11.0.2'
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
  compile 'commons-cli:commons-cli:1.2'
  compile "commons-codec:commons-codec:${versions.commonscodec}"
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
  compile 'commons-lang:commons-lang:2.6'
  compile 'javax.servlet:servlet-api:2.5'
  compile "org.slf4j:slf4j-api:${versions.slf4j}"
  compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

  hdfsFixture project(':test:fixtures:hdfs-fixture')
  // Put the keytab files on the classpath so that test code can access them without
  // the security manager complaining.
  if (isEclipse == false) {
    testRuntime files(project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab").parent)
  }
}
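
// Every org.apache.hadoop artifact above resolves to the single pinned
// ${versions.hadoop2} entry (2.8.5), so bumping that one map entry upgrades the
// whole Hadoop client stack at once.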

restResources {
  restApi {
    includeCore '_common', 'cluster', 'nodes', 'indices', 'index', 'snapshot'
  }
}

normalization {
  runtimeClasspath {
    // ignore generated keytab files for the purposes of build avoidance
    ignore '*.keytab'
    // ignore fixture ports file which is on the classpath primarily to pacify the security manager
    ignore 'ports'
  }
}

dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}
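
// The mapping above makes every hadoop-* jar share a single 'hadoop' license entry,
// rather than requiring one LICENSE/NOTICE file per Hadoop artifact.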

String realm = "BUILD.ELASTIC.CO"
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")

// Create HDFS file system testing fixtures for the HA/secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture, project(':test:fixtures:krb5kdc-fixture').tasks.postProcessFixture
    executable = "${BuildParams.runtimeJavaHome}/bin/java"
    env 'CLASSPATH', "${-> project.configurations.hdfsFixture.asPath}"
    maxWaitInSeconds 60
    onlyIf { BuildParams.inFipsJvm == false }
    waitCondition = { fixture, ant ->
      // the hdfs.MiniHDFS fixture writes the ports file when
      // it's ready, so we can just wait for the file to exist
      return fixture.portsFile.exists()
    }
    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, depend on the Kerberos fixture and principals, and add the krb5 conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
      if (BuildParams.runtimeJavaVersion == JavaVersion.VERSION_1_9) {
        miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
      }
    }
    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add(
        project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
      )
    }

    args miniHDFSArgs.toArray()
  }
}
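
// For reference, the loop above generates four standalone fixture tasks; any one of
// them can be started directly, e.g. (assuming this plugin's usual project path):
//
//   ./gradlew :plugins:repository-hdfs:secureHaHdfsFixture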

Set disabledIntegTestTaskNames = []

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
  task "${integTestTaskName}"(type: RestIntegTestTask) {
    description = "Runs REST tests against an Elasticsearch cluster with HDFS."
    dependsOn(project.bundlePlugin)

    if (disabledIntegTestTaskNames.contains(integTestTaskName)) {
      enabled = false
    }

    if (integTestTaskName.contains("Secure")) {
      if (integTestTaskName.contains("Ha")) {
        dependsOn secureHaHdfsFixture
      } else {
        dependsOn secureHdfsFixture
      }
    }

    runner {
      onlyIf { BuildParams.inFipsJvm == false }
      if (integTestTaskName.contains("Ha")) {
        Path portsFile
        File portsFileDir = file("${workingDir}/hdfsFixture")
        if (integTestTaskName.contains("Secure")) {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("secureHaHdfsFixture")
            .resolve("ports")
        } else {
          portsFile = buildDir.toPath()
            .resolve("fixtures")
            .resolve("haHdfsFixture")
            .resolve("ports")
        }
        nonInputProperties.systemProperty "test.hdfs-fixture.ports", file("$portsFileDir/ports")
        classpath += files(portsFileDir)
        // Copy the ports file to a separate location which is then placed on the test classpath
        doFirst {
          mkdir(portsFileDir)
          copy {
            from portsFile
            into portsFileDir
          }
        }
      }

      if (integTestTaskName.contains("Secure")) {
        if (disabledIntegTestTaskNames.contains(integTestTaskName) == false) {
          nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
          nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
          jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
          nonInputProperties.systemProperty(
            "test.krb5.keytab.hdfs",
            project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
          )
        }
      }
    }
  }

  testClusters."${integTestTaskName}" {
    plugin(file(bundlePlugin.archiveFile))
    if (integTestTaskName.contains("Secure")) {
      systemProperty "java.security.krb5.conf", krb5conf
      extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"), IGNORE_VALUE
      )
    }
  }
}
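
// To recap, the loop above wires up three additional test tasks (integTestHa,
// integTestSecure, integTestSecureHa), each backed by its own testClusters entry;
// the plain integTest task is configured separately below.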

// Determine HDFS fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // The HDFS fixture will not start without Hadoop native libraries on Windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath)
    if (Files.isDirectory(path) &&
      Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
      Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
      Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin")
    }
  }
} else {
  fixtureSupported = true
}
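
// For the check above, a valid HADOOP_HOME layout on Windows looks roughly like:
//
//   %HADOOP_HOME%\bin\winutils.exe
//   %HADOOP_HOME%\bin\hadoop.dll
//   %HADOOP_HOME%\bin\hdfs.dll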

boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
if (legalPath == false) {
  fixtureSupported = false
}

// Always ignore the HA integration tests in the normal integration test runner; they are
// included below as part of their own HA-specific integration test tasks.
integTest.runner {
  onlyIf { BuildParams.inFipsJvm == false }
  exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
  // 'check' depends on the HA test; it already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both the standard and HA tests depend on their respective HDFS fixtures
  integTest.dependsOn hdfsFixture
  integTestHa.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository'
  }

  // Only include the HA integration tests for the HA test task
  integTestHa.runner {
    setIncludes(['**/Ha*TestSuiteIT.class'])
  }
} else {
  if (legalPath) {
    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  } else {
    logger.warn("hdfsFixture unsupported since there are spaces in the path: '" + rootProject.rootDir.toString() + "'")
  }

  // The normal integration test runner will just test that the plugin loads
  integTest.runner {
    systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  }
  // The HA fixture is unsupported. Don't run its tests.
  integTestHa.setEnabled(false)
}

check.dependsOn(integTestSecure, integTestSecureHa)

// Run just the secure hdfs rest test suite.
integTestSecure.runner {
  systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Ignore the HA integration tests. They are included below as part of the integTestSecureHa test runner.
integTestSecure.runner {
  exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHa.runner {
  setIncludes(['**/Ha*TestSuiteIT.class'])
}

thirdPartyAudit {
  ignoreMissingClasses()
  ignoreViolations(
    // internal java api: sun.net.dns.ResolverConfiguration
    // internal java api: sun.net.util.IPAddressUtil
    'org.apache.hadoop.security.SecurityUtil$QualifiedHostResolver',

    // internal java api: sun.misc.Unsafe
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator',
    'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
    'org.apache.hadoop.io.nativeio.NativeIO',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',

    // internal java api: sun.nio.ch.DirectBuffer
    // internal java api: sun.misc.Cleaner
    'org.apache.hadoop.io.nativeio.NativeIO$POSIX',
    'org.apache.hadoop.crypto.CryptoStreamUtils',

    // internal java api: sun.misc.SignalHandler
    'org.apache.hadoop.util.SignalLogger$Handler'
  )
}
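
// Note: ignoreMissingClasses() waives audit failures for classes that are referenced
// by the bundled jars but absent from the classpath, while the ignoreViolations
// entries above waive the listed known uses of JDK-internal APIs by the bundled
// Hadoop and Guava classes.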