in the plugin: guard against HADOOP_HOME in environment on any platform.

hdfs fixture: MiniHDFS now works on Windows if things are set up properly,
but our test fixture still cannot launch it on Windows.
Robert Muir 2015-12-21 02:21:53 -05:00
parent e93c491dbe
commit f67390e0c8
3 changed files with 47 additions and 24 deletions

build.gradle

@@ -19,6 +19,11 @@
 //apply plugin: 'nebula.provided-base'
 
+import org.apache.tools.ant.taskdefs.condition.Os
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.Paths
+
 esplugin {
   description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
   classname 'org.elasticsearch.plugin.hadoop.hdfs.HdfsPlugin'
@@ -65,16 +70,34 @@ task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
 }
 
 integTest {
-  // hdfs fixture will not start without hadoop native libraries on windows
-  // so we can't run integration tests against external hadoop here.
-  if (System.getProperty("os.name").startsWith("Windows")) {
+  boolean fixtureSupported = false;
+  if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+    // hdfs fixture will not start without hadoop native libraries on windows
+    String nativePath = System.getenv("HADOOP_HOME")
+    if (nativePath != null) {
+      Path path = Paths.get(nativePath);
+      if (Files.isDirectory(path) &&
+          Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
+          Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
+          Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
+        fixtureSupported = true
+      } else {
+        throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin");
+      }
+    }
+  } else {
+    fixtureSupported = true
+  }
+
+  if (fixtureSupported) {
+    dependsOn hdfsFixture
+  } else {
+    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
     systemProperty 'tests.rest.blacklist', [
       'hdfs_repository/20_repository/*',
       'hdfs_repository/30_snapshot/*',
       'hdfs_repository/40_restore/*'
     ].join(',')
-  } else {
-    dependsOn hdfsFixture
   }
 }
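For reference, the Gradle check above amounts to a filesystem probe of HADOOP_HOME\bin. A minimal standalone Java sketch of the same idea (the class and method names here are hypothetical, for illustration only):

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Sketch: a HADOOP_HOME is only usable for the fixture on Windows when its
// bin/ directory carries the native helpers HDFS needs.
public class HadoopHomeCheck {
    static boolean hasNativeLibraries(Path hadoopHome) {
        Path bin = hadoopHome.resolve("bin");
        return Files.isDirectory(hadoopHome)
            && Files.exists(bin.resolve("winutils.exe"))
            && Files.exists(bin.resolve("hadoop.dll"))
            && Files.exists(bin.resolve("hdfs.dll"));
    }

    public static void main(String[] args) {
        String env = System.getenv("HADOOP_HOME");
        if (env == null) {
            System.out.println("HADOOP_HOME not set; fixture would be skipped");
        } else if (hasNativeLibraries(Paths.get(env))) {
            System.out.println("HADOOP_HOME looks usable: " + env);
        } else {
            System.out.println("HADOOP_HOME lacks native libraries in bin/: " + env);
        }
    }
}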

HdfsPlugin.java

@@ -54,14 +54,14 @@ public class HdfsPlugin extends Plugin {
     private static Void evilHadoopInit() {
         // hack: on Windows, Shell's clinit has a similar problem that on unix,
         // but here we can workaround it for now by setting hadoop home
+        // on unix: we still want to set this to something we control, because
+        // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom
         // TODO: remove THIS when hadoop is fixed
         Path hadoopHome = null;
         String oldValue = null;
         try {
-            if (Constants.WINDOWS) {
-                hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
-                oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
-            }
+            hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
+            oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
             Class.forName("org.apache.hadoop.security.UserGroupInformation");
             Class.forName("org.apache.hadoop.util.StringUtils");
             Class.forName("org.apache.hadoop.util.ShutdownHookManager");
@@ -69,19 +69,17 @@ public class HdfsPlugin extends Plugin {
             throw new RuntimeException(e);
         } finally {
             // try to clean up the hack
-            if (Constants.WINDOWS) {
-                if (oldValue == null) {
-                    System.clearProperty("hadoop.home.dir");
-                } else {
-                    System.setProperty("hadoop.home.dir", oldValue);
-                }
-                try {
-                    // try to clean up our temp dir too if we can
-                    if (hadoopHome != null) {
-                        Files.delete(hadoopHome);
-                    }
-                } catch (IOException thisIsBestEffort) {}
+            if (oldValue == null) {
+                System.clearProperty("hadoop.home.dir");
+            } else {
+                System.setProperty("hadoop.home.dir", oldValue);
             }
+            try {
+                // try to clean up our temp dir too if we can
+                if (hadoopHome != null) {
+                    Files.delete(hadoopHome);
+                }
+            } catch (IOException thisIsBestEffort) {}
         }
         return null;
     }
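The substance of this change is that the hadoop.home.dir override now runs on every platform, not only Windows: if a user happens to export HADOOP_HOME, Hadoop's Shell static initializer (checkHadoopHome) can fail while the plugin loads. A minimal sketch of the save/set/restore pattern in isolation (runWithTemporaryHadoopHome is a hypothetical helper, not plugin API):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch: point hadoop.home.dir at a throwaway directory we control for the
// duration of Hadoop's static initialization, then restore the old state.
public class TempHadoopHome {
    static void runWithTemporaryHadoopHome(Runnable init) throws IOException {
        Path hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
        String oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
        try {
            init.run(); // e.g. trigger Hadoop class loading here
        } finally {
            if (oldValue == null) {
                System.clearProperty("hadoop.home.dir"); // nothing was set before us
            } else {
                System.setProperty("hadoop.home.dir", oldValue); // restore prior value
            }
            try {
                Files.delete(hadoopHome); // best effort: the temp dir is still empty
            } catch (IOException thisIsBestEffort) {}
        }
    }
}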

MiniHDFS.java

@@ -49,9 +49,11 @@ public class MiniHDFS {
         // configure Paths
         Path baseDir = Paths.get(args[0]);
         // hadoop-home/, so logs will not complain
-        Path hadoopHome = baseDir.resolve("hadoop-home");
-        Files.createDirectories(hadoopHome);
-        System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
+        if (System.getenv("HADOOP_HOME") == null) {
+            Path hadoopHome = baseDir.resolve("hadoop-home");
+            Files.createDirectories(hadoopHome);
+            System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
+        }
         // hdfs-data/, where any data is going
         Path hdfsHome = baseDir.resolve("hdfs-data");
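Seen on its own, the MiniHDFS change is a "respect the user's environment, otherwise default" guard. A small sketch under that assumption (ensureHadoopHome is a hypothetical name, for illustration only):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch: only synthesize a hadoop-home directory when the user has not
// supplied a real HADOOP_HOME, so a properly configured environment wins.
public class HadoopHomeDefault {
    static void ensureHadoopHome(Path baseDir) throws IOException {
        if (System.getenv("HADOOP_HOME") == null) {
            Path hadoopHome = baseDir.resolve("hadoop-home");
            Files.createDirectories(hadoopHome);
            System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
        }
        // otherwise leave things alone: Hadoop resolves HADOOP_HOME itself
    }
}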