SOLR-4916: Do not run HDFS tests on Windows, as they require Cygwin

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1497451 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2013-06-27 17:08:50 +00:00
parent 02e285f6ce
commit 874e966970
1 changed file with 14 additions and 5 deletions


@@ -7,7 +7,10 @@ import java.util.Locale;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
+import org.junit.Assert;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -31,6 +34,10 @@ public class HdfsTestUtil {
   private static Locale savedLocale;
 
   public static MiniDFSCluster setupClass(String dataDir) throws Exception {
+    LuceneTestCase.assumeFalse("HDFS tests on Windows require Cygwin", Constants.WINDOWS);
+    File dir = new File(dataDir);
+    new File(dataDir).mkdirs();
+
     savedLocale = Locale.getDefault();
     // TODO: we HACK around HADOOP-9643
     Locale.setDefault(Locale.ENGLISH);
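
The TODO in this hunk references HADOOP-9643, and the surrounding code shows the workaround: pin the JVM default locale to English for the duration of the test. The classic failure mode for default-locale case conversion (an inference here, not spelled out in the commit) is the Turkish locale, where uppercase 'I' lowercases to the dotless 'ı'. A minimal standalone sketch (hypothetical class name):

import java.util.Locale;

public class LocaleCaseDemo {
  public static void main(String[] args) {
    // Case conversion with the JVM default locale is not portable:
    // under Turkish rules, 'I' maps to dotless 'ı', not 'i'.
    System.out.println("TITLE".toLowerCase(new Locale("tr"))); // prints "tıtle"
    System.out.println("TITLE".toLowerCase(Locale.ENGLISH));   // prints "title"
  }
}

Forcing Locale.ENGLISH sidesteps any such locale-sensitive code in Hadoop until the upstream fix lands.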
@@ -41,12 +48,12 @@ public class HdfsTestUtil {
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions.enabled", "false");
     conf.set("hadoop.security.authentication", "simple");
-    conf.set("hdfs.minidfs.basedir", dataDir + File.separator + "hdfsBaseDir");
-    conf.set("dfs.namenode.name.dir", dataDir + File.separator + "nameNodeNameDir");
-    System.setProperty("test.build.data", dataDir + File.separator + "hdfs" + File.separator + "build");
-    System.setProperty("test.cache.data", dataDir + File.separator + "hdfs" + File.separator + "cache");
+    conf.set("hdfs.minidfs.basedir", dir.getAbsolutePath() + File.separator + "hdfsBaseDir");
+    conf.set("dfs.namenode.name.dir", dir.getAbsolutePath() + File.separator + "nameNodeNameDir");
+    System.setProperty("test.build.data", dir.getAbsolutePath() + File.separator + "hdfs" + File.separator + "build");
+    System.setProperty("test.cache.data", dir.getAbsolutePath() + File.separator + "hdfs" + File.separator + "cache");
     System.setProperty("solr.lock.type", "hdfs");
 
     MiniDFSCluster dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
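
The substance of this hunk is replacing the raw dataDir string with dir.getAbsolutePath(), so the MiniDFSCluster configuration and the test.* system properties all point at one fixed location even when a relative path is passed in. A quick sketch of the difference (hypothetical path, not from this commit):

import java.io.File;

public class AbsolutePathDemo {
  public static void main(String[] args) {
    File dir = new File("build/hdfs-data");     // a relative dataDir, as a caller might pass
    System.out.println(dir.getPath());          // "build/hdfs-data" (depends on the CWD)
    System.out.println(dir.getAbsolutePath());  // e.g. "/home/user/checkout/build/hdfs-data"
  }
}

Resolving the path once up front avoids each consumer interpreting the relative path against whatever working directory it happens to see.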
@@ -66,8 +73,10 @@ public class HdfsTestUtil {
     }
 
     // TODO: we HACK around HADOOP-9643
-    Locale.setDefault(savedLocale);
+    if (savedLocale != null) {
+      Locale.setDefault(savedLocale);
+    }
   }
 
   public static String getDataDir(MiniDFSCluster dfsCluster, String dataDir)
       throws IOException {
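
Taken together, the change has two behavioral pieces: setupClass() now calls LuceneTestCase.assumeFalse(...), so on Windows the whole test is reported as skipped rather than failed, and the teardown path null-checks savedLocale, because a skipped setup never assigns the field and Locale.setDefault(null) would throw a NullPointerException. A minimal sketch of how a consuming test sees the guard (hypothetical test class, not part of this commit):

import org.apache.lucene.util.Constants;
import org.apache.lucene.util.LuceneTestCase;

public class ExampleHdfsSkipTest extends LuceneTestCase {
  public void testHdfsBackedFeature() throws Exception {
    // The same guard the commit adds inside HdfsTestUtil.setupClass(): on
    // Windows this throws AssumptionViolatedException, which the runner
    // reports as an ignored/skipped test instead of a failure.
    assumeFalse("HDFS tests on Windows require Cygwin", Constants.WINDOWS);
    // ... HDFS-dependent setup and assertions would run here on other OSes ...
  }
}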