From f67390e0c8ed75faee0c45c392b116fae694710d Mon Sep 17 00:00:00 2001
From: Robert Muir
Date: Mon, 21 Dec 2015 02:21:53 -0500
Subject: [PATCH] in the plugin: guard against HADOOP_HOME in environment on
 any platform. hdfs fixture: minihdfs works on windows now, if things are
 properly set, but our test fixture still cannot launch this on windows.

---
 plugins/repository-hdfs/build.gradle      | 33 ++++++++++++++++---
 .../plugin/hadoop/hdfs/HdfsPlugin.java    | 30 ++++++++---------
 .../src/main/java/hdfs/MiniHDFS.java      |  8 +++--
 3 files changed, 47 insertions(+), 24 deletions(-)

diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 4bade1850f0..f193fa2aba1 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -18,6 +18,11 @@
  */
 
 //apply plugin: 'nebula.provided-base'
+
+import org.apache.tools.ant.taskdefs.condition.Os
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.Paths
 
 esplugin {
   description 'The HDFS repository plugin adds support for Hadoop Distributed File-System (HDFS) repositories.'
@@ -65,16 +70,34 @@ task hdfsFixture(type: org.elasticsearch.gradle.test.Fixture) {
 }
 
 integTest {
-  // hdfs fixture will not start without hadoop native libraries on windows
-  // so we can't run integration tests against external hadoop here.
-  if (System.getProperty("os.name").startsWith("Windows")) {
+  boolean fixtureSupported = false;
+  if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+    // hdfs fixture will not start without hadoop native libraries on windows
+    String nativePath = System.getenv("HADOOP_HOME")
+    if (nativePath != null) {
+      Path path = Paths.get(nativePath);
+      if (Files.isDirectory(path) &&
+          Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
+          Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
+          Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
+        fixtureSupported = true
+      } else {
+        throw new IllegalStateException("HADOOP_HOME: " + path.toString() + " is invalid, does not contain hadoop native libraries in $HADOOP_HOME/bin");
+      }
+    }
+  } else {
+    fixtureSupported = true
+  }
+
+  if (fixtureSupported) {
+    dependsOn hdfsFixture
+  } else {
+    logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
     systemProperty 'tests.rest.blacklist', [
       'hdfs_repository/20_repository/*',
       'hdfs_repository/30_snapshot/*',
       'hdfs_repository/40_restore/*'
     ].join(',')
-  } else {
-    dependsOn hdfsFixture
   }
 }
 
diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
index 07680e48a1a..87f4f6024d7 100644
--- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
+++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/plugin/hadoop/hdfs/HdfsPlugin.java
@@ -54,14 +54,14 @@ public class HdfsPlugin extends Plugin {
     private static Void evilHadoopInit() {
         // hack: on Windows, Shell's clinit has a similar problem that on unix,
         // but here we can workaround it for now by setting hadoop home
+        // on unix: we still want to set this to something we control, because
+        // if the user happens to have HADOOP_HOME in their environment -> checkHadoopHome goes boom
         // TODO: remove THIS when hadoop is fixed
         Path hadoopHome = null;
         String oldValue = null;
         try {
-            if (Constants.WINDOWS) {
-                hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
-                oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
-            }
+            hadoopHome = Files.createTempDirectory("hadoop").toAbsolutePath();
+            oldValue = System.setProperty("hadoop.home.dir", hadoopHome.toString());
             Class.forName("org.apache.hadoop.security.UserGroupInformation");
             Class.forName("org.apache.hadoop.util.StringUtils");
             Class.forName("org.apache.hadoop.util.ShutdownHookManager");
@@ -69,19 +69,17 @@ public class HdfsPlugin extends Plugin {
             throw new RuntimeException(e);
         } finally {
             // try to clean up the hack
-            if (Constants.WINDOWS) {
-                if (oldValue == null) {
-                    System.clearProperty("hadoop.home.dir");
-                } else {
-                    System.setProperty("hadoop.home.dir", oldValue);
-                }
-                try {
-                    // try to clean up our temp dir too if we can
-                    if (hadoopHome != null) {
-                        Files.delete(hadoopHome);
-                    }
-                } catch (IOException thisIsBestEffort) {}
+            if (oldValue == null) {
+                System.clearProperty("hadoop.home.dir");
+            } else {
+                System.setProperty("hadoop.home.dir", oldValue);
             }
+            try {
+                // try to clean up our temp dir too if we can
+                if (hadoopHome != null) {
+                    Files.delete(hadoopHome);
+                }
+            } catch (IOException thisIsBestEffort) {}
         }
         return null;
     }
diff --git a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java
index f57d389cc90..a4bf47f8eae 100644
--- a/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java
+++ b/test/fixtures/hdfs-fixture/src/main/java/hdfs/MiniHDFS.java
@@ -49,9 +49,11 @@ public class MiniHDFS {
         // configure Paths
         Path baseDir = Paths.get(args[0]);
         // hadoop-home/, so logs will not complain
-        Path hadoopHome = baseDir.resolve("hadoop-home");
-        Files.createDirectories(hadoopHome);
-        System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
+        if (System.getenv("HADOOP_HOME") == null) {
+            Path hadoopHome = baseDir.resolve("hadoop-home");
+            Files.createDirectories(hadoopHome);
+            System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
+        }
 
         // hdfs-data/, where any data is going
         Path hdfsHome = baseDir.resolve("hdfs-data");
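
Note on the build.gradle change: on Windows the fixture is only enabled when
HADOOP_HOME points at a directory that actually contains the native binaries
(winutils.exe, hadoop.dll, hdfs.dll) under bin/. Below is a standalone Java
sketch of that same check, assuming the standard HADOOP_HOME/bin layout; the
class and method names are hypothetical, for illustration only:

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class HadoopHomeCheck {
        // Mirrors the Gradle logic above: HADOOP_HOME must be a directory
        // whose bin/ holds the three native artifacts the fixture needs.
        static boolean hasNativeLibraries(String hadoopHome) {
            if (hadoopHome == null) {
                return false;
            }
            Path home = Paths.get(hadoopHome);
            Path bin = home.resolve("bin");
            return Files.isDirectory(home)
                    && Files.exists(bin.resolve("winutils.exe"))
                    && Files.exists(bin.resolve("hadoop.dll"))
                    && Files.exists(bin.resolve("hdfs.dll"));
        }

        public static void main(String[] args) {
            System.out.println(hasNativeLibraries(System.getenv("HADOOP_HOME")));
        }
    }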
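
Note on the HdfsPlugin change: the Constants.WINDOWS guard is dropped because
Hadoop's Shell (checkHadoopHome) can blow up on any platform when HADOOP_HOME
is set in the environment but unusable, so the plugin now always points
hadoop.home.dir at a temp directory it controls while Hadoop's static
initializers run, then restores the previous value. A minimal sketch of that
save/set/restore idiom, with a hypothetical helper name:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class ScopedHadoopHome {
        // Runs action with hadoop.home.dir pointing at a fresh temp dir,
        // restoring the prior value (or the absence of one) afterwards.
        static void withTempHadoopHome(Runnable action) throws IOException {
            Path tempHome = Files.createTempDirectory("hadoop").toAbsolutePath();
            String oldValue = System.setProperty("hadoop.home.dir", tempHome.toString());
            try {
                action.run(); // e.g. Class.forName(...) to trigger Hadoop clinit
            } finally {
                if (oldValue == null) {
                    System.clearProperty("hadoop.home.dir");
                } else {
                    System.setProperty("hadoop.home.dir", oldValue);
                }
                try {
                    Files.delete(tempHome); // best effort, as in the plugin
                } catch (IOException thisIsBestEffort) {}
            }
        }
    }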
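
Note on the MiniHDFS change: the fixture now only synthesizes a hadoop home
(so logs will not complain) when the user has not supplied one, letting a
real HADOOP_HOME with native libraries win on Windows. The fallback pattern,
sketched with a hypothetical helper:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class HadoopHomeFallback {
        // Respect an explicit HADOOP_HOME from the environment; otherwise
        // point hadoop.home.dir at a scratch directory under baseDir.
        static void ensureHadoopHome(Path baseDir) throws IOException {
            if (System.getenv("HADOOP_HOME") == null) {
                Path hadoopHome = baseDir.resolve("hadoop-home");
                Files.createDirectories(hadoopHome);
                System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
            }
        }
    }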