In the plugin: guard against HADOOP_HOME in the environment on any platform.

HDFS fixture: MiniHDFS works on Windows now if things are properly set up,
but our test fixture still cannot launch it on Windows.
This commit is contained in:
Robert Muir 2015-12-21 02:21:53 -05:00
parent e93c491dbe
commit f67390e0c8
3 changed files with 47 additions and 24 deletions

View File

@@ -19,6 +19,11 @@
//apply plugin: 'nebula.provided-base'
import org.apache.tools.ant.taskdefs.condition.Os
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
esplugin {
description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin'
@@ -65,16 +70,34 @@ task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
}
integTest {
// hdfs fixture will not start without hadoop native libraries on windows
// so we can't run integration tests against external hadoop here.
if (System.getProperty("os.name").startsWith("Windows")) {
boolean fixtureSupported = false;
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// hdfs fixture will not start without hadoop native libraries on windows
String nativePath = System.getenv("HADOOP_HOME")
if (nativePath != null) {
Path path = Paths.get(nativePath);
if (Files.isDirectory(path) &&
Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
fixtureSupported = true
} else {
throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin");
}
}
} else {
fixtureSupported = true
}
if (fixtureSupported) {
dependsOn hdfsFixture
} else {
logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
systemProperty 'tests.rest.blacklist', [
'hdfs_repository/20_repository/*',
'hdfs_repository/30_snapshot/*',
'hdfs_repository/40_restore/*'
].join(',')
} else {
dependsOn hdfsFixture
}
}

View File

@@ -54,14 +54,14 @@ public class HdfsPlugin extends Plugin {
private static Void evilHadoopInit() {
// hack: on Windows, Shell's clinit has a similar problem that on unix,
// but here we can workaround it for now by setting hadoop home
// on unix: we still want to set this to something we control, because
// if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom
// TODO: remove THIS when hadoop is fixed
Path hadoopHome = null;
String oldValue = null;
try {
if (Constants.WINDOWS) {
hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
}
hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
Class.forName("org.apache.hadoop.security.UserGroupInformation");
Class.forName("org.apache.hadoop.util.StringUtils");
Class.forName("org.apache.hadoop.util.ShutdownHookManager");
@@ -69,19 +69,17 @@ public class HdfsPlugin extends Plugin {
throw new RuntimeException(e);
} finally {
// try to clean up the hack
if (Constants.WINDOWS) {
if (oldValue == null) {
System.clearProperty("hadoop.home.dir");
} else {
System.setProperty("hadoop.home.dir", oldValue);
}
try {
// try to clean up our temp dir too if we can
if (hadoopHome != null) {
Files.delete(hadoopHome);
}
} catch (IOException thisIsBestEffort) {}
if (oldValue == null) {
System.clearProperty("hadoop.home.dir");
} else {
System.setProperty("hadoop.home.dir", oldValue);
}
try {
// try to clean up our temp dir too if we can
if (hadoopHome != null) {
Files.delete(hadoopHome);
}
} catch (IOException thisIsBestEffort) {}
}
return null;
}

View File

@@ -49,9 +49,11 @@ public class MiniHDFS {
// configure Paths
Path baseDir = Paths.get(args[0]);
// hadoop-home/, so logs will not complain
Path hadoopHome = baseDir.resolve("hadoop-home");
Files.createDirectories(hadoopHome);
System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
if (System.getenv("HADOOP_HOME") == null) {
Path hadoopHome = baseDir.resolve("hadoop-home");
Files.createDirectories(hadoopHome);
System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
}
// hdfs-data/, where any data is going
Path hdfsHome = baseDir.resolve("hdfs-data");