LUCENE-2618: if normal merges run at same time as optimize, upgrade them to optimize merges if they involve segments marked for optimize; add random thread yield to MockDirWrapper

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1035214 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2010-11-15 10:31:12 +00:00
parent ebad5443b2
commit 0630ca3119
45 changed files with 226 additions and 182 deletions

View File

@ -18,6 +18,7 @@ package org.apache.lucene.index.codecs.appending;
*/
import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
@ -118,8 +119,8 @@ public class TestAppendingCodec extends LuceneTestCase {
@SuppressWarnings("serial")
private static class AppendingRAMDirectory extends MockDirectoryWrapper {
public AppendingRAMDirectory(Directory delegate) {
super(delegate);
public AppendingRAMDirectory(Random random, Directory delegate) {
super(random, delegate);
}
@Override
@ -132,7 +133,7 @@ public class TestAppendingCodec extends LuceneTestCase {
private static final String text = "the quick brown fox jumped over the lazy dog";
public void testCodec() throws Exception {
Directory dir = new AppendingRAMDirectory(new RAMDirectory());
Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer());
cfg.setCodecProvider(new AppendingCodecProvider());

View File

@ -17,6 +17,8 @@ package org.apache.lucene.queryParser.surround.query;
* limitations under the License.
*/
import java.util.Random;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@ -32,9 +34,9 @@ public class SingleFieldTestDb {
private String[] docs;
private String fieldName;
public SingleFieldTestDb(String[] documents, String fName) {
public SingleFieldTestDb(Random random, String[] documents, String fName) {
try {
db = new MockDirectoryWrapper(new RAMDirectory());
db = new MockDirectoryWrapper(random, new RAMDirectory());
docs = documents;
fieldName = fName;
IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(

View File

@ -39,7 +39,7 @@ public class Test02Boolean extends LuceneTestCase {
"a c e a b c"
};
SingleFieldTestDb db1 = new SingleFieldTestDb(docs1, fieldName);
SingleFieldTestDb db1 = new SingleFieldTestDb(random, docs1, fieldName);
public void normalTest1(String query, int[] expdnrs) throws Exception {
BooleanQueryTst bqt = new BooleanQueryTst( query, expdnrs, db1, fieldName, this,

View File

@ -58,7 +58,7 @@ public class Test03Distance extends LuceneTestCase {
"a c e a b c"
};
SingleFieldTestDb db1 = new SingleFieldTestDb(docs1, fieldName);
SingleFieldTestDb db1 = new SingleFieldTestDb(random, docs1, fieldName);
private void distanceTst(String query, int[] expdnrs, SingleFieldTestDb db) throws Exception {
BooleanQueryTst bqt = new BooleanQueryTst( query, expdnrs, db, fieldName, this,
@ -179,7 +179,7 @@ public class Test03Distance extends LuceneTestCase {
""
};
SingleFieldTestDb db2 = new SingleFieldTestDb(docs2, fieldName);
SingleFieldTestDb db2 = new SingleFieldTestDb(random, docs2, fieldName);
public void distanceTest2(String query, int[] expdnrs) throws Exception {
distanceTst(query, expdnrs, db2);
@ -227,7 +227,7 @@ public class Test03Distance extends LuceneTestCase {
""
};
SingleFieldTestDb db3 = new SingleFieldTestDb(docs3, fieldName);
SingleFieldTestDb db3 = new SingleFieldTestDb(random, docs3, fieldName);
public void distanceTest3(String query, int[] expdnrs) throws Exception {
distanceTst(query, expdnrs, db3);

View File

@ -286,6 +286,7 @@ public class IndexWriter implements Closeable {
private IndexFileDeleter deleter;
private Set<SegmentInfo> segmentsToOptimize = new HashSet<SegmentInfo>(); // used by optimize to note those needing optimization
private int optimizeMaxNumSegments;
private Lock writeLock;
@ -2379,6 +2380,7 @@ public class IndexWriter implements Closeable {
synchronized(this) {
resetMergeExceptions();
segmentsToOptimize = new HashSet<SegmentInfo>(segmentInfos);
optimizeMaxNumSegments = maxNumSegments;
// Now mark all pending & running merges as optimize
// merge:
@ -2579,8 +2581,9 @@ public class IndexWriter implements Closeable {
throws CorruptIndexException, IOException {
assert !optimize || maxNumSegmentsOptimize > 0;
if (stopMerges)
if (stopMerges) {
return;
}
// Do not start new merges if we've hit OOME
if (hitOOM) {
@ -2594,19 +2597,21 @@ public class IndexWriter implements Closeable {
if (spec != null) {
final int numMerges = spec.merges.size();
for(int i=0;i<numMerges;i++) {
final MergePolicy.OneMerge merge = ( spec.merges.get(i));
final MergePolicy.OneMerge merge = spec.merges.get(i);
merge.optimize = true;
merge.maxNumSegmentsOptimize = maxNumSegmentsOptimize;
}
}
} else
} else {
spec = mergePolicy.findMerges(segmentInfos);
}
if (spec != null) {
final int numMerges = spec.merges.size();
for(int i=0;i<numMerges;i++)
for(int i=0;i<numMerges;i++) {
registerMerge(spec.merges.get(i));
}
}
}
@ -3613,8 +3618,10 @@ public class IndexWriter implements Closeable {
// disk, updating SegmentInfo, etc.:
readerPool.clear(merge.segments);
if (merge.optimize)
if (merge.optimize) {
// cascade the optimize:
segmentsToOptimize.add(merge.info);
}
return true;
}
@ -3732,12 +3739,19 @@ public class IndexWriter implements Closeable {
boolean isExternal = false;
for(int i=0;i<count;i++) {
final SegmentInfo info = merge.segments.info(i);
if (mergingSegments.contains(info))
if (mergingSegments.contains(info)) {
return false;
if (segmentInfos.indexOf(info) == -1)
}
if (segmentInfos.indexOf(info) == -1) {
return false;
if (info.dir != directory)
}
if (info.dir != directory) {
isExternal = true;
}
if (segmentsToOptimize.contains(info)) {
merge.optimize = true;
merge.maxNumSegmentsOptimize = optimizeMaxNumSegments;
}
}
ensureContiguousMerge(merge);

View File

@ -366,7 +366,7 @@ public class TestAddIndexes extends LuceneTestCase {
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(new RAMDirectory(aux)) });
writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)) });
assertEquals(1060, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@ -398,7 +398,7 @@ public class TestAddIndexes extends LuceneTestCase {
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(4));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(4);
writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(new RAMDirectory(aux)) });
writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)) });
assertEquals(1060, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@ -592,7 +592,7 @@ public class TestAddIndexes extends LuceneTestCase {
public RunAddIndexesThreads(int numCopy) throws Throwable {
NUM_COPY = numCopy;
dir = new MockDirectoryWrapper(new RAMDirectory());
dir = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer())
.setMaxBufferedDocs(2));
@ -600,7 +600,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDoc(writer);
writer.close();
dir2 = new MockDirectoryWrapper(new RAMDirectory());
dir2 = new MockDirectoryWrapper(random, new RAMDirectory());
writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
writer2.commit();
@ -619,7 +619,7 @@ public class TestAddIndexes extends LuceneTestCase {
final Directory[] dirs = new Directory[NUM_COPY];
for(int k=0;k<NUM_COPY;k++)
dirs[k] = new MockDirectoryWrapper(new RAMDirectory(dir));
dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(dir));
int j=0;

View File

@ -189,7 +189,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
Directory directory;
// First in a RAM directory:
directory = new MockDirectoryWrapper(new RAMDirectory());
directory = new MockDirectoryWrapper(random, new RAMDirectory());
runTest(directory);
directory.close();

View File

@ -895,7 +895,7 @@ public class TestIndexReader extends LuceneTestCase
// Iterate w/ ever increasing free disk space:
while(!done) {
MockDirectoryWrapper dir = new MockDirectoryWrapper(new RAMDirectory(startDir));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
// If IndexReader hits disk full, it can write to
// the same files again.
@ -941,7 +941,7 @@ public class TestIndexReader extends LuceneTestCase
}
dir.setMaxSizeInBytes(thisDiskFree);
dir.setRandomIOExceptionRate(rate, diskFree);
dir.setRandomIOExceptionRate(rate);
try {
if (0 == x) {

View File

@ -905,7 +905,7 @@ public class TestIndexWriter extends LuceneTestCase {
final class MyRAMDirectory extends MockDirectoryWrapper {
private LockFactory myLockFactory;
MyRAMDirectory(Directory delegate) {
super(delegate);
super(random, delegate);
lockFactory = null;
myLockFactory = new SingleInstanceLockFactory();
}
@ -2083,7 +2083,7 @@ public class TestIndexWriter extends LuceneTestCase {
@Override
public void run() {
// LUCENE-2239: won't work with NIOFS/MMAP
Directory dir = new MockDirectoryWrapper(new RAMDirectory());
Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriter w = null;
while(!finish) {
try {

View File

@ -439,7 +439,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// Iterate w/ ever increasing free disk space:
while (!done) {
MockDirectoryWrapper dir = new MockDirectoryWrapper(new RAMDirectory(startDir));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
dir.setPreventDoubleWrite(false);
IndexWriter modifier = new IndexWriter(dir,
newIndexWriterConfig(
@ -488,7 +488,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
dir.setMaxSizeInBytes(thisDiskFree);
dir.setRandomIOExceptionRate(rate, diskFree);
dir.setRandomIOExceptionRate(rate);
try {
if (0 == x) {

View File

@ -810,18 +810,18 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
w.close();
for(int i=0;i<200;i++) {
MockDirectoryWrapper dir = new MockDirectoryWrapper(new RAMDirectory(startDir));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler());
((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
w = new IndexWriter(dir, conf);
dir.setRandomIOExceptionRate(0.5, 100);
dir.setRandomIOExceptionRate(0.5);
try {
w.optimize();
} catch (IOException ioe) {
if (ioe.getCause() == null)
fail("optimize threw IOException without root cause");
}
dir.setRandomIOExceptionRate(0, 0);
dir.setRandomIOExceptionRate(0);
w.close();
dir.close();
}

View File

@ -54,7 +54,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
while(true) {
if (VERBOSE)
System.out.println("TEST: cycle: diskFree=" + diskFree);
MockDirectoryWrapper dir = new MockDirectoryWrapper(new RAMDirectory());
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
dir.setMaxSizeInBytes(diskFree);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
MergeScheduler ms = writer.getConfig().getMergeScheduler();
@ -213,7 +213,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
while(!done) {
// Make a new dir that will enforce disk usage:
MockDirectoryWrapper dir = new MockDirectoryWrapper(new RAMDirectory(startDir));
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
IOException err = null;
@ -263,7 +263,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
dir.setTrackDiskUsage(true);
dir.setMaxSizeInBytes(thisDiskFree);
dir.setRandomIOExceptionRate(rate, diskFree);
dir.setRandomIOExceptionRate(rate);
try {
@ -390,7 +390,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Make sure we don't hit disk full during close below:
dir.setMaxSizeInBytes(0);
dir.setRandomIOExceptionRate(0.0, 0);
dir.setRandomIOExceptionRate(0.0);
writer.close();

View File

@ -355,7 +355,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
try {
final Directory[] dirs = new Directory[numDirs];
for (int k = 0; k < numDirs; k++)
dirs[k] = new MockDirectoryWrapper(new RAMDirectory(addDir));
dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(addDir));
//int j = 0;
//while (true) {
// System.out.println(Thread.currentThread().getName() + ": iter
@ -631,7 +631,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final Directory[] dirs = new Directory[10];
for (int i=0;i<10;i++) {
dirs[i] = new MockDirectoryWrapper(new RAMDirectory(dir1));
dirs[i] = new MockDirectoryWrapper(random, new RAMDirectory(dir1));
}
IndexReader r = writer.getReader();

View File

@ -50,7 +50,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
private class SeekCountingDirectory extends MockDirectoryWrapper {
public SeekCountingDirectory(Directory delegate) {
super(delegate);
super(random, delegate);
}
@Override

View File

@ -50,7 +50,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
class CountingRAMDirectory extends MockDirectoryWrapper {
public CountingRAMDirectory(Directory delegate) {
super(delegate);
super(random, delegate);
}
public IndexInput openInput(String fileName) throws IOException {

View File

@ -297,7 +297,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
Directory[] indexDirs = {new MockDirectoryWrapper(new RAMDirectory(dir))};
Directory[] indexDirs = {new MockDirectoryWrapper(random, new RAMDirectory(dir))};
writer.addIndexes(indexDirs);
writer.optimize();
writer.close();

View File

@ -41,12 +41,10 @@ public class TestThreadedOptimize extends LuceneTestCase {
//private final static int NUM_THREADS = 5;
private final static int NUM_ITER = 1;
//private final static int NUM_ITER = 10;
private final static int NUM_ITER2 = 1;
//private final static int NUM_ITER2 = 5;
private boolean failed;
private volatile boolean failed;
private void setFailed() {
failed = true;
@ -116,15 +114,16 @@ public class TestThreadedOptimize extends LuceneTestCase {
// System.out.println("TEST: now index=" + writer.segString());
assertEquals(expectedDocCount, writer.maxDoc());
assertEquals("index=" + writer.segString() + " numDocs" + writer.numDocs() + " maxDoc=" + writer.maxDoc() + " config=" + writer.getConfig(), expectedDocCount, writer.numDocs());
assertEquals("index=" + writer.segString() + " numDocs" + writer.numDocs() + " maxDoc=" + writer.maxDoc() + " config=" + writer.getConfig(), expectedDocCount, writer.maxDoc());
writer.close();
writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
IndexReader reader = IndexReader.open(directory, true);
assertTrue(reader.isOptimized());
assertTrue("reader=" + reader, reader.isOptimized());
assertEquals(expectedDocCount, reader.numDocs());
reader.close();
}

View File

@ -193,8 +193,8 @@ public class TestTransactions extends LuceneTestCase {
public void testTransactions() throws Throwable {
// we can't use a non-ramdir on windows, because this test needs to double-write.
MockDirectoryWrapper dir1 = new MockDirectoryWrapper(new RAMDirectory());
MockDirectoryWrapper dir2 = new MockDirectoryWrapper(new RAMDirectory());
MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random, new RAMDirectory());
MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random, new RAMDirectory());
dir1.setPreventDoubleWrite(false);
dir2.setPreventDoubleWrite(false);
dir1.failOn(new RandomFailure());

View File

@ -20,6 +20,7 @@ package org.apache.lucene.search;
import java.io.IOException;
import java.util.Set;
import java.util.TreeSet;
import java.util.Random;
import junit.framework.Assert;
@ -79,11 +80,11 @@ public class CheckHits {
* @see Searcher#search(Query,Collector)
* @see #checkHits
*/
public static void checkHitCollector(Query query, String defaultFieldName,
public static void checkHitCollector(Random random, Query query, String defaultFieldName,
Searcher searcher, int[] results)
throws IOException {
QueryUtils.check(query,searcher);
QueryUtils.check(random,query,searcher);
Set<Integer> correct = new TreeSet<Integer>();
for (int i = 0; i < results.length; i++) {
@ -98,7 +99,7 @@ public class CheckHits {
for (int i = -1; i < 2; i++) {
actual.clear();
QueryUtils.wrapSearcher(searcher, i).search(query, c);
QueryUtils.wrapSearcher(random, searcher, i).search(query, c);
Assert.assertEquals("Wrap Searcher " + i + ": " +
query.toString(defaultFieldName),
correct, actual);
@ -109,7 +110,7 @@ public class CheckHits {
for (int i = -1; i < 2; i++) {
actual.clear();
QueryUtils.wrapUnderlyingReader
((IndexSearcher)searcher, i).search(query, c);
(random, (IndexSearcher)searcher, i).search(query, c);
Assert.assertEquals("Wrap Reader " + i + ": " +
query.toString(defaultFieldName),
correct, actual);
@ -153,6 +154,7 @@ public class CheckHits {
* @see #checkHitCollector
*/
public static void checkHits(
Random random,
Query query,
String defaultFieldName,
Searcher searcher,
@ -173,7 +175,7 @@ public class CheckHits {
Assert.assertEquals(query.toString(defaultFieldName), correct, actual);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
/** Tests that a Hits has an expected order of documents */

View File

@ -5,6 +5,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.Random;
import junit.framework.Assert;
@ -99,10 +100,10 @@ public class QueryUtils {
* @see #checkSerialization
* @see #checkEqual
*/
public static void check(Query q1, Searcher s) {
check(q1, s, true);
public static void check(Random random, Query q1, Searcher s) {
check(random, q1, s, true);
}
private static void check(Query q1, Searcher s, boolean wrap) {
private static void check(Random random, Query q1, Searcher s, boolean wrap) {
try {
check(q1);
if (s!=null) {
@ -111,15 +112,15 @@ public class QueryUtils {
checkFirstSkipTo(q1,is);
checkSkipTo(q1,is);
if (wrap) {
check(q1, wrapUnderlyingReader(is, -1), false);
check(q1, wrapUnderlyingReader(is, 0), false);
check(q1, wrapUnderlyingReader(is, +1), false);
check(random, q1, wrapUnderlyingReader(random, is, -1), false);
check(random, q1, wrapUnderlyingReader(random, is, 0), false);
check(random, q1, wrapUnderlyingReader(random, is, +1), false);
}
}
if (wrap) {
check(q1, wrapSearcher(s, -1), false);
check(q1, wrapSearcher(s, 0), false);
check(q1, wrapSearcher(s, +1), false);
check(random,q1, wrapSearcher(random, s, -1), false);
check(random,q1, wrapSearcher(random, s, 0), false);
check(random,q1, wrapSearcher(random, s, +1), false);
}
checkExplanations(q1,s);
checkSerialization(q1,s);
@ -142,7 +143,7 @@ public class QueryUtils {
* @param s the searcher to wrap
* @param edge if negative, s will be the first sub; if 0, s will be in the middle, if positive s will be the last sub
*/
public static IndexSearcher wrapUnderlyingReader(final IndexSearcher s, final int edge)
public static IndexSearcher wrapUnderlyingReader(Random random, final IndexSearcher s, final int edge)
throws IOException {
IndexReader r = s.getIndexReader();
@ -150,19 +151,19 @@ public class QueryUtils {
// we can't put deleted docs before the nested reader, because
// it will throw off the docIds
IndexReader[] readers = new IndexReader[] {
edge < 0 ? r : IndexReader.open(makeEmptyIndex(0), true),
IndexReader.open(makeEmptyIndex(0), true),
edge < 0 ? r : IndexReader.open(makeEmptyIndex(random, 0), true),
IndexReader.open(makeEmptyIndex(random, 0), true),
new MultiReader(new IndexReader[] {
IndexReader.open(makeEmptyIndex(edge < 0 ? 4 : 0), true),
IndexReader.open(makeEmptyIndex(0), true),
0 == edge ? r : IndexReader.open(makeEmptyIndex(0), true)
IndexReader.open(makeEmptyIndex(random, edge < 0 ? 4 : 0), true),
IndexReader.open(makeEmptyIndex(random, 0), true),
0 == edge ? r : IndexReader.open(makeEmptyIndex(random, 0), true)
}),
IndexReader.open(makeEmptyIndex(0 < edge ? 0 : 7), true),
IndexReader.open(makeEmptyIndex(0), true),
IndexReader.open(makeEmptyIndex(random, 0 < edge ? 0 : 7), true),
IndexReader.open(makeEmptyIndex(random, 0), true),
new MultiReader(new IndexReader[] {
IndexReader.open(makeEmptyIndex(0 < edge ? 0 : 5), true),
IndexReader.open(makeEmptyIndex(0), true),
0 < edge ? r : IndexReader.open(makeEmptyIndex(0), true)
IndexReader.open(makeEmptyIndex(random, 0 < edge ? 0 : 5), true),
IndexReader.open(makeEmptyIndex(random, 0), true),
0 < edge ? r : IndexReader.open(makeEmptyIndex(random, 0), true)
})
};
IndexSearcher out = new IndexSearcher(new MultiReader(readers));
@ -178,24 +179,24 @@ public class QueryUtils {
* @param s the Searcher to wrap
* @param edge if negative, s will be the first sub; if 0, s will be in the middle, if positive s will be the last sub
*/
public static MultiSearcher wrapSearcher(final Searcher s, final int edge)
public static MultiSearcher wrapSearcher(Random random, final Searcher s, final int edge)
throws IOException {
// we can't put deleted docs before the nested reader, because
// it will throw off the docIds
Searcher[] searchers = new Searcher[] {
edge < 0 ? s : new IndexSearcher(makeEmptyIndex(0), true),
edge < 0 ? s : new IndexSearcher(makeEmptyIndex(random, 0), true),
new MultiSearcher(new Searcher[] {
new IndexSearcher(makeEmptyIndex(edge < 0 ? 65 : 0), true),
new IndexSearcher(makeEmptyIndex(0), true),
0 == edge ? s : new IndexSearcher(makeEmptyIndex(0), true)
new IndexSearcher(makeEmptyIndex(random, edge < 0 ? 65 : 0), true),
new IndexSearcher(makeEmptyIndex(random, 0), true),
0 == edge ? s : new IndexSearcher(makeEmptyIndex(random, 0), true)
}),
new IndexSearcher(makeEmptyIndex(0 < edge ? 0 : 3), true),
new IndexSearcher(makeEmptyIndex(0), true),
new IndexSearcher(makeEmptyIndex(random, 0 < edge ? 0 : 3), true),
new IndexSearcher(makeEmptyIndex(random, 0), true),
new MultiSearcher(new Searcher[] {
new IndexSearcher(makeEmptyIndex(0 < edge ? 0 : 5), true),
new IndexSearcher(makeEmptyIndex(0), true),
0 < edge ? s : new IndexSearcher(makeEmptyIndex(0), true)
new IndexSearcher(makeEmptyIndex(random, 0 < edge ? 0 : 5), true),
new IndexSearcher(makeEmptyIndex(random, 0), true),
0 < edge ? s : new IndexSearcher(makeEmptyIndex(random, 0), true)
})
};
MultiSearcher out = new MultiSearcher(searchers);
@ -203,9 +204,9 @@ public class QueryUtils {
return out;
}
private static Directory makeEmptyIndex(final int numDeletedDocs)
private static Directory makeEmptyIndex(Random random, final int numDeletedDocs)
throws IOException {
Directory d = new MockDirectoryWrapper(new RAMDirectory());
Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer()));
for (int i = 0; i < numDeletedDocs; i++) {

View File

@ -66,13 +66,13 @@ public class TestBoolean2 extends LuceneTestCase {
searcher = new IndexSearcher(directory, true);
// Make big index
dir2 = new MockDirectoryWrapper(new RAMDirectory(directory));
dir2 = new MockDirectoryWrapper(random, new RAMDirectory(directory));
// First multiply small test index:
mulFactor = 1;
int docCount = 0;
do {
final Directory copy = new MockDirectoryWrapper(new RAMDirectory(dir2));
final Directory copy = new MockDirectoryWrapper(random, new RAMDirectory(dir2));
RandomIndexWriter w = new RandomIndexWriter(random, dir2);
w.addIndexes(new Directory[] {copy});
docCount = w.maxDoc();
@ -243,7 +243,7 @@ public class TestBoolean2 extends LuceneTestCase {
// match up.
Sort sort = Sort.INDEXORDER;
QueryUtils.check(q1,searcher);
QueryUtils.check(random, q1,searcher);
TopFieldCollector collector = TopFieldCollector.create(sort, 1000,
false, true, true, true);

View File

@ -85,7 +85,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
printHits(getName(), h, s);
}
assertEquals("result count", expected, h.length);
QueryUtils.check(q,s);
QueryUtils.check(random, q,s);
}
public void testAllOptional() throws Exception {
@ -329,8 +329,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
TopDocs top1 = s.search(q1,null,100);
TopDocs top2 = s.search(q2,null,100);
if (i < 100) {
QueryUtils.check(q1,s);
QueryUtils.check(q2,s);
QueryUtils.check(random, q1,s);
QueryUtils.check(random, q2,s);
}
// The constrained query
// should be a superset to the unconstrained query.

View File

@ -24,11 +24,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
public class TestBooleanOr extends LuceneTestCase {
@ -47,7 +42,7 @@ public class TestBooleanOr extends LuceneTestCase {
private int search(Query q) throws IOException {
QueryUtils.check(q,searcher);
QueryUtils.check(random, q,searcher);
return searcher.search(q, null, 1000).totalHits;
}

View File

@ -162,7 +162,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
dq.add(tq("id", "d1"));
dq.add(tq("dek", "DOES_NOT_EXIST"));
QueryUtils.check(dq, s);
QueryUtils.check(random, dq, s);
final Weight dw = dq.weight(s);
final Scorer ds = dw.scorer(s.getIndexReader(), true, false);
@ -178,7 +178,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
dq.add(tq("dek", "albino"));
dq.add(tq("dek", "DOES_NOT_EXIST"));
QueryUtils.check(dq, s);
QueryUtils.check(random, dq, s);
final Weight dw = dq.weight(s);
final Scorer ds = dw.scorer(s.getIndexReader(), true, false);
@ -192,7 +192,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
q.add(tq("hed", "albino"));
q.add(tq("hed", "elephant"));
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -216,7 +216,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.0f);
q.add(tq("dek", "albino"));
q.add(tq("dek", "elephant"));
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -241,7 +241,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q.add(tq("hed", "elephant"));
q.add(tq("dek", "albino"));
q.add(tq("dek", "elephant"));
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -264,7 +264,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
DisjunctionMaxQuery q = new DisjunctionMaxQuery(0.01f);
q.add(tq("dek", "albino"));
q.add(tq("dek", "elephant"));
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -292,7 +292,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q1.add(tq("hed", "albino"));
q1.add(tq("dek", "albino"));
q.add(q1, BooleanClause.Occur.MUST);// true,false);
QueryUtils.check(q1, s);
QueryUtils.check(random, q1, s);
}
{
@ -300,10 +300,10 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q2.add(tq("hed", "elephant"));
q2.add(tq("dek", "elephant"));
q.add(q2, BooleanClause.Occur.MUST);// true,false);
QueryUtils.check(q2, s);
QueryUtils.check(random, q2, s);
}
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -335,7 +335,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q2.add(tq("dek", "elephant"));
q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
}
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -371,7 +371,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q2.add(tq("dek", "elephant"));
q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
}
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
@ -425,7 +425,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q2.add(tq("dek", "elephant"));
q.add(q2, BooleanClause.Occur.SHOULD);// false,false);
}
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;

View File

@ -98,7 +98,7 @@ public class TestExplanations extends LuceneTestCase {
/** check the expDocNrs first, then check the query (and the explanations) */
public void qtest(Query q, int[] expDocNrs) throws Exception {
CheckHits.checkHitCollector(q, FIELD, searcher, expDocNrs);
CheckHits.checkHitCollector(random, q, FIELD, searcher, expDocNrs);
}
/**

View File

@ -110,7 +110,7 @@ public class TestFilteredQuery extends LuceneTestCase {
ScoreDoc[] hits = searcher.search (filteredquery, null, 1000).scoreDocs;
assertEquals (1, hits.length);
assertEquals (1, hits[0].doc);
QueryUtils.check(filteredquery,searcher);
QueryUtils.check(random, filteredquery,searcher);
hits = searcher.search (filteredquery, null, 1000, new Sort(new SortField("sorter", SortField.STRING))).scoreDocs;
assertEquals (1, hits.length);
@ -119,18 +119,18 @@ public class TestFilteredQuery extends LuceneTestCase {
filteredquery = new FilteredQuery (new TermQuery (new Term ("field", "one")), filter);
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
assertEquals (2, hits.length);
QueryUtils.check(filteredquery,searcher);
QueryUtils.check(random, filteredquery,searcher);
filteredquery = new FilteredQuery (new TermQuery (new Term ("field", "x")), filter);
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
assertEquals (1, hits.length);
assertEquals (3, hits[0].doc);
QueryUtils.check(filteredquery,searcher);
QueryUtils.check(random, filteredquery,searcher);
filteredquery = new FilteredQuery (new TermQuery (new Term ("field", "y")), filter);
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
assertEquals (0, hits.length);
QueryUtils.check(filteredquery,searcher);
QueryUtils.check(random, filteredquery,searcher);
// test boost
Filter f = newStaticFilterA();
@ -190,7 +190,7 @@ public class TestFilteredQuery extends LuceneTestCase {
Query filteredquery = new FilteredQuery(rq, filter);
ScoreDoc[] hits = searcher.search(filteredquery, null, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(filteredquery,searcher);
QueryUtils.check(random, filteredquery,searcher);
}
public void testBoolean() throws Exception {
@ -203,7 +203,7 @@ public class TestFilteredQuery extends LuceneTestCase {
bq.add(query, BooleanClause.Occur.MUST);
ScoreDoc[] hits = searcher.search(bq, null, 1000).scoreDocs;
assertEquals(0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
// Make sure BooleanQuery, which does out-of-order
@ -216,7 +216,7 @@ public class TestFilteredQuery extends LuceneTestCase {
bq.add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.SHOULD);
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
}

View File

@ -23,7 +23,6 @@ import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.document.*;
import org.apache.lucene.index.*;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.store.*;
import org.apache.lucene.util.Version;
@ -105,7 +104,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals(0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
public void testBarelyCloseEnough() throws Exception {
@ -114,7 +113,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
/**
@ -126,7 +125,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("exact match", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
query = new PhraseQuery();
@ -134,7 +133,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("reverse not exact", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
public void testSlop1() throws Exception {
@ -144,7 +143,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "two"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("in order", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// Ensures slop of 1 does not work for phrases out of order;
@ -155,7 +154,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("reversed, slop not 2 or more", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
/**
@ -167,7 +166,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
query = new PhraseQuery();
@ -176,7 +175,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("not sloppy enough", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
@ -191,7 +190,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("two total moves", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
query = new PhraseQuery();
@ -201,13 +200,13 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("slop of 5 not close enough", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
query.setSlop(6);
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("slop of 6 just right", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
@ -230,7 +229,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field","words"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// StopAnalyzer as of 2.4 does not leave "holes", so this matches.
@ -239,7 +238,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "here"));
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
searcher.close();
@ -270,7 +269,7 @@ public class TestPhraseQuery extends LuceneTestCase {
phraseQuery.add(new Term("source", "info"));
ScoreDoc[] hits = searcher.search(phraseQuery, null, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(phraseQuery,searcher);
QueryUtils.check(random, phraseQuery,searcher);
TermQuery termQuery = new TermQuery(new Term("contents","foobar"));
@ -279,7 +278,7 @@ public class TestPhraseQuery extends LuceneTestCase {
booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST);
hits = searcher.search(booleanQuery, null, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(termQuery,searcher);
QueryUtils.check(random, termQuery,searcher);
searcher.close();
@ -326,7 +325,7 @@ public class TestPhraseQuery extends LuceneTestCase {
booleanQuery.add(termQuery, BooleanClause.Occur.MUST);
hits = searcher.search(booleanQuery, null, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(booleanQuery,searcher);
QueryUtils.check(random, booleanQuery,searcher);
searcher.close();
@ -368,7 +367,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals(1, hits[1].doc);
assertEquals(0.31, hits[2].score, 0.01);
assertEquals(2, hits[2].doc);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
searcher.close();
reader.close();
directory.close();
@ -393,13 +392,13 @@ public class TestPhraseQuery extends LuceneTestCase {
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("slop of 100 just right", 1, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
query.setSlop(99);
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("slop of 99 not enough", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
// work on two docs like this: "phrase exist notexist exist found"
@ -412,7 +411,7 @@ public class TestPhraseQuery extends LuceneTestCase {
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("phrase without repetitions exists in 2 docs", 2, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// phrase with repetitions that exists in 2 docs
query = new PhraseQuery();
@ -423,7 +422,7 @@ public class TestPhraseQuery extends LuceneTestCase {
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("phrase with repetitions exists in two docs", 2, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// phrase I with repetitions that does not exist in any doc
query = new PhraseQuery();
@ -434,7 +433,7 @@ public class TestPhraseQuery extends LuceneTestCase {
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// phrase II with repetitions that does not exist in any doc
query = new PhraseQuery();
@ -446,7 +445,7 @@ public class TestPhraseQuery extends LuceneTestCase {
hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
}
@ -469,7 +468,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("phrase found with exact phrase scorer", 1, hits.length);
float score0 = hits[0].score;
//System.out.println("(exact) field: two three: "+score0);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// search on non palyndrome, find phrase with slop 2, though no slop required here.
query.setSlop(2); // to use sloppy scorer
@ -478,7 +477,7 @@ public class TestPhraseQuery extends LuceneTestCase {
float score1 = hits[0].score;
//System.out.println("(sloppy) field: two three: "+score1);
assertEquals("exact scorer and sloppy scorer score the same when slop does not matter",score0, score1, SCORE_COMP_THRESH);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// search ordered in palyndrome, find it twice
query = new PhraseQuery();
@ -489,7 +488,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("just sloppy enough", 1, hits.length);
//float score2 = hits[0].score;
//System.out.println("palindrome: two three: "+score2);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
@ -503,7 +502,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("just sloppy enough", 1, hits.length);
//float score3 = hits[0].score;
//System.out.println("palindrome: three two: "+score3);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);
@ -530,7 +529,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("phrase found with exact phrase scorer", 1, hits.length);
float score0 = hits[0].score;
//System.out.println("(exact) field: one two three: "+score0);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// just make sure no exc:
searcher.explain(query, 0);
@ -542,7 +541,7 @@ public class TestPhraseQuery extends LuceneTestCase {
float score1 = hits[0].score;
//System.out.println("(sloppy) field: one two three: "+score1);
assertEquals("exact scorer and sloppy scorer score the same when slop does not matter",score0, score1, SCORE_COMP_THRESH);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
// search ordered in palyndrome, find it twice
query = new PhraseQuery();
@ -558,7 +557,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("just sloppy enough", 1, hits.length);
//float score2 = hits[0].score;
//System.out.println("palindrome: one two three: "+score2);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
//assertTrue("ordered scores higher in palindrome",score1+SCORE_COMP_THRESH<score2);
@ -573,7 +572,7 @@ public class TestPhraseQuery extends LuceneTestCase {
assertEquals("just sloppy enough", 1, hits.length);
//float score3 = hits[0].score;
//System.out.println("palindrome: three two one: "+score3);
QueryUtils.check(query,searcher);
QueryUtils.check(random, query,searcher);
//commented out for sloppy-phrase efficiency (issue 736) - see SloppyPhraseScorer.phraseFreq().
//assertTrue("reversed scores higher in palindrome",score1+SCORE_COMP_THRESH<score3);

View File

@ -296,11 +296,11 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
assertEquals("queries should have same #hits",h1.size(),h4CustomAdd.size());
assertEquals("queries should have same #hits",h1.size(),h5CustomMulAdd.size());
QueryUtils.check(q1,s);
QueryUtils.check(q2,s);
QueryUtils.check(q3,s);
QueryUtils.check(q4,s);
QueryUtils.check(q5,s);
QueryUtils.check(random, q1,s);
QueryUtils.check(random, q2,s);
QueryUtils.check(random, q3,s);
QueryUtils.check(random, q4,s);
QueryUtils.check(random, q5,s);
// verify scores ratios
for (final Integer doc : h1.keySet()) {

View File

@ -80,7 +80,7 @@ public class TestFieldScoreQuery extends FunctionTestSetup {
IndexSearcher s = new IndexSearcher(dir, true);
Query q = new FieldScoreQuery(field,tp);
log("test: "+q);
QueryUtils.check(q,s);
QueryUtils.check(random, q,s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
assertEquals("All docs should be matched!",N_DOCS,h.length);
String prevID = "ID"+(N_DOCS+1); // greater than all ids of docs in this test

View File

@ -69,7 +69,7 @@ public class TestOrdValues extends FunctionTestSetup {
Query q = new ValueSourceQuery(vs);
log("test: " + q);
QueryUtils.check(q, s);
QueryUtils.check(random, q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
assertEquals("All docs should be matched!", N_DOCS, h.length);
String prevID = inOrder

View File

@ -37,6 +37,7 @@ import static org.apache.lucene.util.LuceneTestCase.TEST_VERSION_CURRENT;
import java.io.Reader;
import java.io.IOException;
import java.util.Random;
/**
*
@ -111,8 +112,8 @@ public class PayloadHelper {
* @throws IOException
*/
// TODO: randomize
public IndexSearcher setUp(Similarity similarity, int numDocs) throws IOException {
Directory directory = new MockDirectoryWrapper(new RAMDirectory());
public IndexSearcher setUp(Random random, Similarity similarity, int numDocs) throws IOException {
Directory directory = new MockDirectoryWrapper(random, new RAMDirectory());
PayloadAnalyzer analyzer = new PayloadAnalyzer();
// TODO randomize this

View File

@ -282,7 +282,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
assertTrue("hits Size: " + hits.totalHits + " is not: " + 1, hits.totalHits == 1);
int[] results = new int[1];
results[0] = 0;//hits.scoreDocs[0].doc;
CheckHits.checkHitCollector(query, PayloadHelper.NO_PAYLOAD_FIELD, searcher, results);
CheckHits.checkHitCollector(random, query, PayloadHelper.NO_PAYLOAD_FIELD, searcher, results);
}
// must be static for weight serialization tests

View File

@ -562,6 +562,6 @@ public class TestBasics extends LuceneTestCase {
}
private void checkHits(Query query, int[] results) throws IOException {
CheckHits.checkHits(query, "field", searcher, results);
CheckHits.checkHits(random, query, "field", searcher, results);
}
}

View File

@ -124,7 +124,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
}
protected void check(SpanQuery q, int[] docs) throws Exception {
CheckHits.checkHitCollector(q, null, searcher, docs);
CheckHits.checkHitCollector(random, q, null, searcher, docs);
}
public void testRewrite0() throws Exception {

View File

@ -88,7 +88,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
public void testSpanNearQuery() throws Exception {
SpanNearQuery q = makeQuery();
CheckHits.checkHits(q, FIELD, searcher, new int[] {0,1});
CheckHits.checkHits(random, q, FIELD, searcher, new int[] {0,1});
}
public String s(Spans span) {

View File

@ -60,7 +60,7 @@ public class TestPayloadSpans extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
PayloadHelper helper = new PayloadHelper();
searcher = helper.setUp(similarity, 1000);
searcher = helper.setUp(random, similarity, 1000);
indexReader = searcher.getIndexReader();
}

View File

@ -89,7 +89,7 @@ public class TestSpans extends LuceneTestCase {
}
private void checkHits(Query query, int[] results) throws IOException {
CheckHits.checkHits(query, field, searcher, results);
CheckHits.checkHits(random, query, field, searcher, results);
}
private void orderedSlopTest3SQ(

View File

@ -137,7 +137,7 @@ public class TestSpansAdvanced extends LuceneTestCase {
protected static void assertHits(Searcher s, Query query,
final String description, final String[] expectedIds,
final float[] expectedScores) throws IOException {
QueryUtils.check(query, s);
QueryUtils.check(random, query, s);
final float tolerance = 1e-5f;

View File

@ -78,8 +78,9 @@ public class MockDirectoryWrapper extends Directory {
unSyncedFiles = new HashSet<String>();
}
public MockDirectoryWrapper(Directory delegate) {
public MockDirectoryWrapper(Random random, Directory delegate) {
this.delegate = delegate;
this.randomState = random;
init();
}
@ -96,6 +97,7 @@ public class MockDirectoryWrapper extends Directory {
@Deprecated
@Override
public void sync(String name) throws IOException {
maybeYield();
maybeThrowDeterministicException();
if (crashed)
throw new IOException("cannot sync after crash");
@ -105,6 +107,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized void sync(Collection<String> names) throws IOException {
maybeYield();
for (String name : names)
maybeThrowDeterministicException();
if (crashed)
@ -115,6 +118,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public String toString() {
maybeYield();
return "MockDirWrapper(" + delegate + ")";
}
@ -213,10 +217,8 @@ public class MockDirectoryWrapper extends Directory {
* IOException on the first write to an OutputStream based
* on this probability.
*/
public void setRandomIOExceptionRate(double rate, long seed) {
public void setRandomIOExceptionRate(double rate) {
randomIOExceptionRate = rate;
// seed so we have deterministic behaviour:
randomState = new Random(seed);
}
public double getRandomIOExceptionRate() {
return randomIOExceptionRate;
@ -233,6 +235,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized void deleteFile(String name) throws IOException {
maybeYield();
deleteFile(name, false);
}
@ -251,7 +254,14 @@ public class MockDirectoryWrapper extends Directory {
return ioe;
}
private void maybeYield() {
if (randomState.nextBoolean()) {
Thread.yield();
}
}
private synchronized void deleteFile(String name, boolean forced) throws IOException {
maybeYield();
maybeThrowDeterministicException();
@ -277,6 +287,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized IndexOutput createOutput(String name) throws IOException {
maybeYield();
if (crashed)
throw new IOException("cannot createOutput after crash");
init();
@ -317,6 +328,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized IndexInput openInput(String name) throws IOException {
maybeYield();
if (!delegate.fileExists(name))
throw new FileNotFoundException(name);
@ -367,6 +379,7 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized void close() throws IOException {
maybeYield();
if (openFiles == null) {
openFiles = new HashMap<String,Integer>();
openFilesDeleted = new HashSet<String>();
@ -456,56 +469,67 @@ public class MockDirectoryWrapper extends Directory {
@Override
public synchronized String[] listAll() throws IOException {
maybeYield();
return delegate.listAll();
}
@Override
public synchronized boolean fileExists(String name) throws IOException {
maybeYield();
return delegate.fileExists(name);
}
@Override
public synchronized long fileModified(String name) throws IOException {
maybeYield();
return delegate.fileModified(name);
}
@Override
public synchronized void touchFile(String name) throws IOException {
maybeYield();
delegate.touchFile(name);
}
@Override
public synchronized long fileLength(String name) throws IOException {
maybeYield();
return delegate.fileLength(name);
}
@Override
public synchronized Lock makeLock(String name) {
maybeYield();
return delegate.makeLock(name);
}
@Override
public synchronized void clearLock(String name) throws IOException {
maybeYield();
delegate.clearLock(name);
}
@Override
public synchronized void setLockFactory(LockFactory lockFactory) {
maybeYield();
delegate.setLockFactory(lockFactory);
}
@Override
public synchronized LockFactory getLockFactory() {
maybeYield();
return delegate.getLockFactory();
}
@Override
public synchronized String getLockID() {
maybeYield();
return delegate.getLockID();
}
@Override
public synchronized void copy(Directory to, String src, String dest) throws IOException {
maybeYield();
delegate.copy(to, src, dest);
}
}

View File

@ -99,7 +99,14 @@ public class MockIndexOutputWrapper extends IndexOutput {
}
throw new IOException("fake disk full at " + dir.getRecomputedActualSizeInBytes() + " bytes when writing " + name);
} else {
delegate.writeBytes(b, offset, len);
if (dir.randomState.nextBoolean()) {
final int half = len/2;
delegate.writeBytes(b, offset, half);
Thread.yield();
delegate.writeBytes(b, offset+half, len-half);
} else {
delegate.writeBytes(b, offset, len);
}
}
dir.maybeThrowDeterministicException();

View File

@ -40,8 +40,8 @@ public class TestFileSwitchDirectory extends LuceneTestCase {
fileExtensions.add(IndexFileNames.FIELDS_EXTENSION);
fileExtensions.add(IndexFileNames.FIELDS_INDEX_EXTENSION);
Directory primaryDir = new MockDirectoryWrapper(new RAMDirectory());
Directory secondaryDir = new MockDirectoryWrapper(new RAMDirectory());
Directory primaryDir = new MockDirectoryWrapper(random, new RAMDirectory());
Directory secondaryDir = new MockDirectoryWrapper(random, new RAMDirectory());
FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
IndexWriter writer = new IndexWriter(fsd, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));

View File

@ -42,7 +42,7 @@ public class TestLockFactory extends LuceneTestCase {
// methods are called at the right time, locks are created, etc.
public void testCustomLockFactory() throws IOException {
Directory dir = new MockDirectoryWrapper(new RAMDirectory());
Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
MockLockFactory lf = new MockLockFactory();
dir.setLockFactory(lf);
@ -75,7 +75,7 @@ public class TestLockFactory extends LuceneTestCase {
// exceptions raised:
// Verify: NoLockFactory allows two IndexWriters
public void testRAMDirectoryNoLocking() throws IOException {
Directory dir = new MockDirectoryWrapper(new RAMDirectory());
Directory dir = new MockDirectoryWrapper(random, new RAMDirectory());
dir.setLockFactory(NoLockFactory.getNoLockFactory());
assertTrue("RAMDirectory.setLockFactory did not take",

View File

@ -69,7 +69,7 @@ public class TestRAMDirectory extends LuceneTestCase {
public void testRAMDirectory () throws IOException {
Directory dir = FSDirectory.open(indexDir);
MockDirectoryWrapper ramDir = new MockDirectoryWrapper(new RAMDirectory(dir));
MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
// close the underlaying directory
dir.close();
@ -101,7 +101,7 @@ public class TestRAMDirectory extends LuceneTestCase {
public void testRAMDirectorySize() throws IOException, InterruptedException {
Directory dir = FSDirectory.open(indexDir);
final MockDirectoryWrapper ramDir = new MockDirectoryWrapper(new RAMDirectory(dir));
final MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
dir.close();
final IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(

View File

@ -659,7 +659,7 @@ public abstract class LuceneTestCase extends Assert {
public static MockDirectoryWrapper newDirectory(Random r) throws IOException {
StackTraceElement[] stack = new Exception().getStackTrace();
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
MockDirectoryWrapper dir = new MockDirectoryWrapper(impl);
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
stores.put(dir, stack);
return dir;
}
@ -679,7 +679,7 @@ public abstract class LuceneTestCase extends Assert {
for (String file : d.listAll()) {
d.copy(impl, file, file);
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(impl);
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
stores.put(dir, stack);
return dir;
}

View File

@ -19,7 +19,6 @@ package org.apache.lucene.util;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@ -137,7 +136,7 @@ public class TestBitVector extends LuceneTestCase
}
private void doTestWriteRead(int n) throws Exception {
MockDirectoryWrapper d = new MockDirectoryWrapper(new RAMDirectory());
MockDirectoryWrapper d = new MockDirectoryWrapper(random, new RAMDirectory());
d.setPreventDoubleWrite(false);
BitVector bv = new BitVector(n);
// test count when incrementally setting bits
@ -168,7 +167,7 @@ public class TestBitVector extends LuceneTestCase
}
private void doTestDgaps(int size, int count1, int count2) throws IOException {
MockDirectoryWrapper d = new MockDirectoryWrapper(new RAMDirectory());
MockDirectoryWrapper d = new MockDirectoryWrapper(random, new RAMDirectory());
d.setPreventDoubleWrite(false);
BitVector bv = new BitVector(size);
for (int i=0; i<count1; i++) {