fix TestShardSearching to make a new _TestUtil.getTempDir for each shard, so it does not reuse dirs across test runs when you fix the seed

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1368255 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-08-01 21:26:38 +00:00
parent 4eb362c0b3
commit b75834bded
2 changed files with 14 additions and 9 deletions
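Background for the change: ShardSearchingTestBase previously received a single base directory from the test (one _TestUtil.getTempDir call made in TestShardSearching) and derived each node's directory as baseDir + "." + nodeID. That base name is reproducible when the test seed is pinned, so a re-run with the same seed pointed the shards at directories left over from the previous run. After this commit each NodeState asks for its own temp directory instead. Below is a minimal JDK-only sketch of the before/after shapes; it does not use the real _TestUtil.getTempDir or LuceneTestCase.newFSDirectory helpers, and every name in it is illustrative.

    // Sketch only: approximates the old vs. new directory layout with plain JDK calls.
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Random;

    public class PerShardTempDirSketch {

      // Old shape (approximation): one base name that is reproducible when the test
      // seed is fixed, then "<base>.<nodeID>" per shard. A second run with the same
      // seed lands in the directories the first run left behind.
      static Path oldStyleNodeDir(long seed, int nodeID) {
        String base = "TestShardSearching-" + Long.toHexString(new Random(seed).nextLong());
        return Paths.get(System.getProperty("java.io.tmpdir"), base + "." + nodeID);
      }

      // New shape (approximation): every shard gets its own freshly created temp
      // directory, unique per run regardless of which seed the test started with.
      static Path newStyleNodeDir() throws IOException {
        return Files.createTempDirectory("ShardSearchingTestBase");
      }

      public static void main(String[] args) throws IOException {
        long fixedSeed = 42L; // stands in for a pinned test seed
        for (int nodeID = 0; nodeID < 2; nodeID++) {
          System.out.println("old (same every run):  " + oldStyleNodeDir(fixedSeed, nodeID));
          System.out.println("new (fresh every run): " + newStyleNodeDir());
        }
      }
    }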

TestShardSearching.java

@@ -77,8 +77,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
       System.out.println("TEST: numNodes=" + numNodes + " runTimeSec=" + runTimeSec + " maxSearcherAgeSeconds=" + maxSearcherAgeSeconds);
     }
-    start(_TestUtil.getTempDir("TestShardSearching").toString(),
-          numNodes,
+    start(numNodes,
           runTimeSec,
           maxSearcherAgeSeconds
           );

ShardSearchingTestBase.java

@@ -18,7 +18,6 @@ package org.apache.lucene.search;
  */

 import java.io.Closeable;
-import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -27,15 +26,17 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;

+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
-import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.PrintStreamInfoStream;
+import org.apache.lucene.util._TestUtil;

 // TODO
 //   - doc blocks?  so we can test joins/grouping...
@@ -423,11 +424,16 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
     private volatile ShardIndexSearcher currentShardSearcher;

-    public NodeState(Random random, String baseDir, int nodeID, int numNodes) throws IOException {
+    public NodeState(Random random, int nodeID, int numNodes) throws IOException {
       myNodeID = nodeID;
-      dir = newFSDirectory(new File(baseDir + "." + myNodeID));
+      dir = newFSDirectory(_TestUtil.getTempDir("ShardSearchingTestBase"));
       // TODO: set warmer
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
+      IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
+      iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+      if (VERBOSE) {
+        iwc.setInfoStream(new PrintStreamInfoStream(System.out));
+      }
+      writer = new IndexWriter(dir, iwc);
       mgr = new SearcherManager(writer, true, null);
       searchers = new SearcherLifetimeManager();
@@ -556,14 +562,14 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
   long endTimeNanos;

   private Thread changeIndicesThread;

-  protected void start(String baseDirName, int numNodes, double runTimeSec, int maxSearcherAgeSeconds) throws IOException {
+  protected void start(int numNodes, double runTimeSec, int maxSearcherAgeSeconds) throws IOException {
     endTimeNanos = System.nanoTime() + (long) (runTimeSec*1000000000);
     this.maxSearcherAgeSeconds = maxSearcherAgeSeconds;
     nodes = new NodeState[numNodes];
     for(int nodeID=0;nodeID<numNodes;nodeID++) {
-      nodes[nodeID] = new NodeState(random(), baseDirName, nodeID, numNodes);
+      nodes[nodeID] = new NodeState(random(), nodeID, numNodes);
     }

     long[] nodeVersions = new long[nodes.length];
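Beyond the directory change, the NodeState constructor now builds an explicit IndexWriterConfig so it can force OpenMode.CREATE (a stale directory from an earlier run is wiped rather than appended to) and route IndexWriter's info stream to stdout when the test runs verbosely. Lifted out of the test framework into a small standalone helper, the pattern looks roughly like the sketch below; the verbose flag stands in for LuceneTestCase.VERBOSE and the caller supplies the analyzer-configured IndexWriterConfig (TEST_VERSION_CURRENT plus MockAnalyzer in the hunk above), so no particular Lucene version constant is assumed.

    // Sketch of the writer-opening pattern the diff adds to NodeState; assumes only
    // lucene-core classes on the classpath, not the Lucene test framework.
    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.PrintStreamInfoStream;

    final class VerboseWriterFactory {
      private VerboseWriterFactory() {}

      static IndexWriter openFreshWriter(Directory dir, IndexWriterConfig iwc, boolean verbose) throws IOException {
        // CREATE (rather than CREATE_OR_APPEND) discards whatever a previous run may
        // have left in this directory, so a pinned seed cannot resurrect old segments.
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        if (verbose) {
          // Mirrors the VERBOSE branch in the diff: dump IndexWriter diagnostics to stdout.
          iwc.setInfoStream(new PrintStreamInfoStream(System.out));
        }
        return new IndexWriter(dir, iwc);
      }
    }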