tests: fix hdfs collections api test

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1531771 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2013-10-14 01:30:40 +00:00
parent c5803cabf5
commit 16ba0ac6f5
2 changed files with 5 additions and 0 deletions


@@ -45,6 +45,8 @@
     <double name="maxWriteMBPerSecFlush">2000000</double>
     <double name="maxWriteMBPerSecMerge">3000000</double>
     <double name="maxWriteMBPerSecRead">4000000</double>
+    <str name="solr.hdfs.home">${solr.hdfs.home:}</str>
+    <bool name="solr.hdfs.blockcache.enabled">${solr.hdfs.blockcache.enabled:true}</bool>
   </directoryFactory>
   <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
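The two added lines make the test config's HDFS home configurable (empty default via Solr's ${property:default} substitution) and keep the HDFS block cache switched on unless a test overrides it. Below is a minimal sketch, not code from this commit, of how a test might resolve those properties against a running MiniDFSCluster; the class and method names are illustrative only.

import org.apache.hadoop.hdfs.MiniDFSCluster;

public class HdfsTestPropsSketch {
  // Hypothetical helper: points the solrconfig properties above at a live
  // MiniDFSCluster so ${solr.hdfs.home:} and ${solr.hdfs.blockcache.enabled:true}
  // resolve to test values instead of their defaults.
  public static void setHdfsProps(MiniDFSCluster dfsCluster) {
    System.setProperty("solr.hdfs.home", dfsCluster.getURI().toString() + "/solr");
    System.setProperty("solr.hdfs.blockcache.enabled", "true");
  }
}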


@@ -80,6 +80,9 @@ public class HdfsTestUtil {
   public static String getDataDir(MiniDFSCluster dfsCluster, String dataDir)
       throws IOException {
+    if (dataDir == null) {
+      return null;
+    }
     URI uri = dfsCluster.getURI();
     String dir = uri.toString()
         + "/"