LUCENE-9195: more slow tests fixes

This commit is contained in:
Robert Muir 2020-01-31 07:57:34 -05:00
parent ed7f507c3c
commit 9ceaff913e
No known key found for this signature in database
GPG Key ID: 817AE1DD322D7ECA
36 changed files with 242 additions and 316 deletions

View File

@ -83,11 +83,11 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
// TODO: these big methods can easily blow up some of the other ram-hungry codecs...
// for now just keep them here, as we want to test this for this format.
@Slow
public void testSortedSetVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedSetVsStoredFields(atLeast(100), 1, 32766, 16, 100);
int numDocs = TEST_NIGHTLY ? atLeast(100) : atLeast(10);
doTestSortedSetVsStoredFields(numDocs, 1, 32766, 16, 100);
}
}

View File

@ -665,8 +665,7 @@ public class TestAddIndexes extends LuceneTestCase {
volatile boolean didClose;
final DirectoryReader[] readers;
final int NUM_COPY;
final static int NUM_THREADS = 5;
final Thread[] threads = new Thread[NUM_THREADS];
final Thread[] threads;
public RunAddIndexesThreads(int numCopy) throws Throwable {
NUM_COPY = numCopy;
@ -685,11 +684,13 @@ public class TestAddIndexes extends LuceneTestCase {
readers = new DirectoryReader[NUM_COPY];
for(int i=0;i<NUM_COPY;i++)
readers[i] = DirectoryReader.open(dir);
int numThreads = TEST_NIGHTLY ? 5 : 2;
threads = new Thread[numThreads];
}
void launchThreads(final int numIter) {
for(int i=0;i<NUM_THREADS;i++) {
for(int i=0;i<threads.length;i++) {
threads[i] = new Thread() {
@Override
public void run() {
@ -714,13 +715,15 @@ public class TestAddIndexes extends LuceneTestCase {
};
}
for(int i=0;i<NUM_THREADS;i++)
threads[i].start();
for (Thread thread : threads) {
thread.start();
}
}
void joinThreads() throws Exception {
for(int i=0;i<NUM_THREADS;i++)
threads[i].join();
for (Thread thread : threads) {
thread.join();
}
}
void close(boolean doWait) throws Throwable {
@ -815,7 +818,7 @@ public class TestAddIndexes extends LuceneTestCase {
c.joinThreads();
int expectedNumDocs = 100+NUM_COPY*(4*NUM_ITER/5)*RunAddIndexesThreads.NUM_THREADS*RunAddIndexesThreads.NUM_INIT_DOCS;
int expectedNumDocs = 100+NUM_COPY*(4*NUM_ITER/5)*c.threads.length*RunAddIndexesThreads.NUM_INIT_DOCS;
assertEquals("expected num docs don't match - failures: " + c.failures, expectedNumDocs, c.writer2.getDocStats().numDocs);
c.close(true);

View File

@ -16,69 +16,63 @@
*/
package org.apache.lucene.index;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.store.*;
import org.apache.lucene.util.*;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
public class TestAtomicUpdate extends LuceneTestCase {
private static abstract class TimedThread extends Thread {
volatile boolean failed;
int count;
private static float RUN_TIME_MSEC = atLeast(500);
private TimedThread[] allThreads;
int numIterations;
volatile Throwable failure;
abstract public void doWork() throws Throwable;
abstract public void doWork(int currentIteration) throws IOException;
TimedThread(TimedThread[] threads) {
this.allThreads = threads;
TimedThread(int numIterations) {
this.numIterations = numIterations;
}
@Override
public void run() {
final long stopTime = System.currentTimeMillis() + (long) RUN_TIME_MSEC;
count = 0;
try {
do {
if (anyErrors()) break;
doWork();
count++;
} while(System.currentTimeMillis() < stopTime);
for (int count = 0; count < numIterations; count++) {
doWork(count);
}
} catch (Throwable e) {
System.out.println(Thread.currentThread().getName() + ": exc");
failure = e;
e.printStackTrace(System.out);
failed = true;
throw new RuntimeException(e);
}
}
private boolean anyErrors() {
for(int i=0;i<allThreads.length;i++)
if (allThreads[i] != null && allThreads[i].failed)
return true;
return false;
}
}
private static class IndexerThread extends TimedThread {
IndexWriter writer;
public IndexerThread(IndexWriter writer, TimedThread[] threads) {
super(threads);
public IndexerThread(IndexWriter writer, int numIterations) {
super(numIterations);
this.writer = writer;
}
@Override
public void doWork() throws Exception {
public void doWork(int currentIteration) throws IOException {
// Update all 100 docs...
for(int i=0; i<100; i++) {
Document d = new Document();
d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
d.add(new TextField("contents", English.intToEnglish(i+10*count), Field.Store.NO));
d.add(new TextField("contents", English.intToEnglish(i+10*currentIteration), Field.Store.NO));
d.add(new IntPoint("doc", i));
d.add(new IntPoint("doc2d", i, i));
writer.updateDocument(new Term("id", Integer.toString(i)), d);
@ -89,13 +83,13 @@ public class TestAtomicUpdate extends LuceneTestCase {
private static class SearcherThread extends TimedThread {
private Directory directory;
public SearcherThread(Directory directory, TimedThread[] threads) {
super(threads);
public SearcherThread(Directory directory, int numIterations) {
super(numIterations);
this.directory = directory;
}
@Override
public void doWork() throws Throwable {
public void doWork(int currentIteration) throws IOException {
IndexReader r = DirectoryReader.open(directory);
assertEquals(100, r.numDocs());
r.close();
@ -103,12 +97,14 @@ public class TestAtomicUpdate extends LuceneTestCase {
}
/*
Run one indexer and 2 searchers against single index as
stress test.
*/
* Run N indexer and N searchers against single index as
* stress test.
*/
public void runTest(Directory directory) throws Exception {
TimedThread[] threads = new TimedThread[4];
int indexThreads = TEST_NIGHTLY ? 2 : 1;
int searchThreads = TEST_NIGHTLY ? 2 : 1;
int indexIterations = TEST_NIGHTLY ? 10 : 1;
int searchIterations = TEST_NIGHTLY ? 10 : 1;
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(7);
@ -131,36 +127,27 @@ public class TestAtomicUpdate extends LuceneTestCase {
assertEquals(100, r.numDocs());
r.close();
IndexerThread indexerThread = new IndexerThread(writer, threads);
threads[0] = indexerThread;
indexerThread.start();
IndexerThread indexerThread2 = new IndexerThread(writer, threads);
threads[1] = indexerThread2;
indexerThread2.start();
SearcherThread searcherThread1 = new SearcherThread(directory, threads);
threads[2] = searcherThread1;
searcherThread1.start();
SearcherThread searcherThread2 = new SearcherThread(directory, threads);
threads[3] = searcherThread2;
searcherThread2.start();
indexerThread.join();
indexerThread2.join();
searcherThread1.join();
searcherThread2.join();
List<TimedThread> threads = new ArrayList<>();
for (int i = 0; i < indexThreads; i++) {
threads.add(new IndexerThread(writer, indexIterations));
}
for (int i = 0; i < searchThreads; i++) {
threads.add(new SearcherThread(directory, searchIterations));
}
for (TimedThread thread : threads) {
thread.start();
}
for (TimedThread thread : threads) {
thread.join();
}
writer.close();
assertTrue("hit unexpected exception in indexer", !indexerThread.failed);
assertTrue("hit unexpected exception in indexer2", !indexerThread2.failed);
assertTrue("hit unexpected exception in search1", !searcherThread1.failed);
assertTrue("hit unexpected exception in search2", !searcherThread2.failed);
//System.out.println(" Writer: " + indexerThread.count + " iterations");
//System.out.println("Searcher 1: " + searcherThread1.count + " searchers created");
//System.out.println("Searcher 2: " + searcherThread2.count + " searchers created");
for (TimedThread thread : threads) {
if (thread.failure != null) {
throw new RuntimeException("hit exception from " + thread, thread.failure);
}
}
}
/* */

View File

@ -666,13 +666,12 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
int refreshChance = TestUtil.nextInt(random(), 5, 200);
int deleteChance = TestUtil.nextInt(random(), 2, 100);
int idUpto = 0;
int deletedCount = 0;
List<OneSortDoc> docs = new ArrayList<>();
DirectoryReader r = w.getReader();
int numIters = atLeast(1000);
int numIters = TEST_NIGHTLY ? atLeast(1000) : atLeast(100);
for(int iter=0;iter<numIters;iter++) {
BytesRef value = toBytes((long) random().nextInt(valueRange));
if (docs.isEmpty() || random().nextInt(3) == 1) {
@ -977,7 +976,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// create index
final int numFields = TestUtil.nextInt(random(), 1, 4);
final int numDocs = atLeast(2000);
final int numDocs = TEST_NIGHTLY ? atLeast(2000) : atLeast(200);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(new StringField("id", "doc" + i, Store.NO));
@ -996,7 +995,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
}
final int numThreads = TestUtil.nextInt(random(), 3, 6);
final int numThreads = TEST_NIGHTLY ? TestUtil.nextInt(random(), 3, 6) : 2;
final CountDownLatch done = new CountDownLatch(numThreads);
final AtomicInteger numUpdates = new AtomicInteger(atLeast(100));

View File

@ -627,7 +627,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
iwc.setMaxBufferedDocs(2);
IndexWriter w = new IndexWriter(dir, iwc);
for(int i=0;i<1000;i++) {
int numDocs = TEST_NIGHTLY ? 1000 : 100;
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
doc.add(newStringField("field", ""+i, Field.Store.YES));
w.addDocument(doc);

View File

@ -216,6 +216,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
/*
* Test "by time expiration" deletion policy:
*/
// TODO: this wall-clock-dependent test doesn't seem to actually test any deletionpolicy logic?
@Nightly
public void testExpirationTimeDeletionPolicy() throws IOException, InterruptedException {
final double SECONDS = 2.0;

View File

@ -94,7 +94,7 @@ public class TestDocumentsWriterStallControl extends LuceneTestCase {
}
public void testAccquireReleaseRace() throws InterruptedException {
public void testAcquireReleaseRace() throws InterruptedException {
final DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl();
ctrl.updateStalled(false);
final AtomicBoolean stop = new AtomicBoolean(false);
@ -120,7 +120,7 @@ public class TestDocumentsWriterStallControl extends LuceneTestCase {
}
start(threads);
int iters = atLeast(10000);
int iters = TEST_NIGHTLY ? atLeast(10000) : atLeast(1000);
final float checkPointProbability = TEST_NIGHTLY ? 0.5f : 0.1f;
for (int i = 0; i < iters; i++) {
if (checkPoint.get()) {

View File

@ -27,6 +27,7 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
@ -41,7 +42,8 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
ReaderHolder holder = new ReaderHolder();
ReaderThread[] threads = new ReaderThread[atLeast(3)];
int numReaderThreads = TEST_NIGHTLY ? TestUtil.nextInt(random(), 2, 5) : 2;
ReaderThread[] threads = new ReaderThread[numReaderThreads];
final CountDownLatch latch = new CountDownLatch(1);
WriterThread writerThread = new WriterThread(holder, writer,
atLeast(500), random(), latch);

View File

@ -49,7 +49,7 @@ import org.junit.Test;
@SuppressCodecs("SimpleText") // too slow here
public class TestIndexWriterReader extends LuceneTestCase {
private final int numThreads = TEST_NIGHTLY ? 5 : 3;
private final int numThreads = TEST_NIGHTLY ? 5 : 2;
public static int count(Term t, IndexReader r) throws IOException {
int count = 0;
@ -371,8 +371,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
@Slow
public void testAddIndexesAndDoDeletesThreads() throws Throwable {
final int numIter = 2;
int numDirs = 3;
final int numIter = TEST_NIGHTLY ? 2 : 1;
int numDirs = TEST_NIGHTLY ? 3 : 2;
Directory mainDir = getAssertNoDeletesDirectory(newDirectory());

View File

@ -177,7 +177,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
CharsRefBuilder utf16 = new CharsRefBuilder();
int num = atLeast(100000);
int num = atLeast(10000);
for (int iter = 0; iter < num; iter++) {
boolean hasIllegal = fillUnicode(buffer, expected, 0, 20);

View File

@ -648,7 +648,8 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
try (Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig().setMaxBufferedDocs(-1).setRAMBufferSizeMB(0.00001), useSoftDeletes)) {
Thread[] threads = new Thread[3 + random().nextInt(3)];
int numThreads = TEST_NIGHTLY ? 3 + random().nextInt(3) : 3;
Thread[] threads = new Thread[numThreads];
AtomicInteger done = new AtomicInteger(0);
CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
Document doc = new Document();

View File

@ -470,7 +470,8 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
}
Thread[] threads = new Thread[2 + random().nextInt(3)];
int numThreads = TEST_NIGHTLY ? 2 + random().nextInt(3) : 2;
Thread[] threads = new Thread[numThreads];
CyclicBarrier barrier = new CyclicBarrier(threads.length + 1);
for (int i = 0; i < threads.length; i++) {
threads[i] = new Thread(() -> {

View File

@ -42,55 +42,49 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
);
IndexReader reader = writer.getReader(); // start pooling readers
reader.close();
RunThread[] indexThreads = new RunThread[4];
int numThreads = TEST_NIGHTLY ? 4 : 2;
int numIterations = TEST_NIGHTLY ? 2000 : 50;
RunThread[] indexThreads = new RunThread[numThreads];
for (int x=0; x < indexThreads.length; x++) {
indexThreads[x] = new RunThread(x % 2, writer);
indexThreads[x] = new RunThread(x % 2, writer, numIterations);
indexThreads[x].setName("Thread " + x);
indexThreads[x].start();
}
long startTime = System.currentTimeMillis();
long duration = 1000;
while ((System.currentTimeMillis() - startTime) < duration) {
Thread.sleep(100);
for (RunThread thread : indexThreads) {
thread.join();
}
for (int x=0; x < indexThreads.length; x++) {
indexThreads[x].run = false;
assertNull("Exception thrown: "+indexThreads[x].ex, indexThreads[x].ex);
}
int delCount = 0;
int addCount = 0;
for (int x=0; x < indexThreads.length; x++) {
indexThreads[x].join();
addCount += indexThreads[x].addCount;
delCount += indexThreads[x].delCount;
}
for (int x=0; x < indexThreads.length; x++) {
assertNull("Exception thrown: "+indexThreads[x].ex, indexThreads[x].ex);
}
//System.out.println("addCount:"+addCount);
//System.out.println("delCount:"+delCount);
writer.close();
mainDir.close();
for (RunThread thread : indexThreads) {
if (thread.failure != null) {
throw new RuntimeException("hit exception from " + thread, thread.failure);
}
}
}
public class RunThread extends Thread {
int type;
IndexWriter writer;
volatile boolean run = true;
volatile Throwable ex;
int numIterations;
volatile Throwable failure;
int delCount = 0;
int addCount = 0;
int type;
final Random r = new Random(random().nextLong());
public RunThread(int type, IndexWriter writer) {
public RunThread(int type, IndexWriter writer, int numIterations) {
this.type = type;
this.writer = writer;
this.numIterations = numIterations;
}
@Override
public void run() {
try {
while (run) {
for (int iter = 0; iter < numIterations; iter++) {
//int n = random.nextInt(2);
if (type == 0) {
int i = seq.addAndGet(1);
@ -111,8 +105,8 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
}
} catch (Throwable ex) {
ex.printStackTrace(System.out);
this.ex = ex;
run = false;
this.failure = failure;
throw new RuntimeException(ex);
}
}
}

View File

@ -33,18 +33,13 @@ import org.apache.lucene.search.similarities.ClassicSimilarity;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.TestUtil;
/**
* Test that norms info is preserved during index life - including
* separate norms, addDocument, addIndexes, forceMerge.
*/
@SuppressCodecs({ "Direct", "SimpleText" })
@Slow
public class TestNorms extends LuceneTestCase {
static final String BYTE_TEST_FIELD = "normsTestByte";
@ -75,23 +70,18 @@ public class TestNorms extends LuceneTestCase {
Similarity provider = new MySimProvider();
config.setSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
final LineFileDocs docs = new LineFileDocs(random);
int num = atLeast(100);
for (int i = 0; i < num; i++) {
Document doc = docs.nextDoc();
Document doc = new Document();
int boost = TestUtil.nextInt(random, 1, 255);
String value = IntStream.range(0, boost).mapToObj(k -> Integer.toString(boost)).collect(Collectors.joining(" "));
Field f = new TextField(BYTE_TEST_FIELD, value, Field.Store.YES);
doc.add(f);
writer.addDocument(doc);
doc.removeField(BYTE_TEST_FIELD);
if (rarely()) {
writer.commit();
}
}
writer.commit();
writer.close();
docs.close();
}

View File

@ -1221,7 +1221,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// create index
final int numFields = TestUtil.nextInt(random(), 1, 4);
final int numDocs = atLeast(2000);
final int numDocs = TEST_NIGHTLY ? atLeast(2000) : atLeast(200);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(new StringField("id", "doc" + i, Store.NO));
@ -1240,7 +1240,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
}
final int numThreads = TestUtil.nextInt(random(), 3, 6);
final int numThreads = TEST_NIGHTLY ? TestUtil.nextInt(random(), 3, 6) : 2;
final CountDownLatch done = new CountDownLatch(numThreads);
final AtomicInteger numUpdates = new AtomicInteger(atLeast(100));

View File

@ -32,7 +32,8 @@ import org.apache.lucene.util.TestUtil;
public class TestStressAdvance extends LuceneTestCase {
public void testStressAdvance() throws Exception {
for(int iter=0;iter<3;iter++) {
int numIters = TEST_NIGHTLY ? 3 : 1;
for(int iter=0;iter<numIters;iter++) {
if (VERBOSE) {
System.out.println("\nTEST: iter=" + iter);
}

View File

@ -710,7 +710,8 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
doTestSimulateAppendOnly(mergePolicy, 100_000_000, 10_000);
}
@Override @Slow
@Override @Nightly
// TODO: this test has bugs that prevent you from lowering the number of docs in the test!
public void testSimulateUpdates() throws IOException {
TieredMergePolicy mergePolicy = mergePolicy();
// Avoid low values of the max merged segment size which prevent this merge policy from scaling well

View File

@ -210,7 +210,7 @@ public class TestAutomatonQuery extends LuceneTestCase {
}
public void testHashCodeWithThreads() throws Exception {
final AutomatonQuery queries[] = new AutomatonQuery[1000];
final AutomatonQuery queries[] = new AutomatonQuery[atLeast(100)];
for (int i = 0; i < queries.length; i++) {
queries[i] = new AutomatonQuery(new Term("bogus", "bogus"), AutomatonTestUtil.randomAutomaton(random()), Integer.MAX_VALUE);
}

View File

@ -542,7 +542,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
IndexWriter w = new IndexWriter(dir, config);
int numDocs = atLeast(1000); // make sure some terms have skip data
int numDocs = TEST_NIGHTLY ? atLeast(1000) : atLeast(100); // at night, make sure some terms have skip data
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
for (int j = 0; j < numFields; j++) {

View File

@ -35,22 +35,28 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@LuceneTestCase.SuppressCodecs("SimpleText")
public class TestDoubleValuesSource extends LuceneTestCase {
private static final double LEAST_DOUBLE_VALUE = 45.72;
private Directory dir;
private IndexReader reader;
private IndexSearcher searcher;
private static Directory dir;
private static IndexReader reader;
private static IndexSearcher searcher;
@Override
public void setUp() throws Exception {
super.setUp();
@BeforeClass
public static void beforeClass() throws Exception {
dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
int numDocs = TestUtil.nextInt(random(), 2049, 4000);
final int numDocs;
if (TEST_NIGHTLY) {
numDocs = TestUtil.nextInt(random(), 2049, 4000);
} else {
numDocs = atLeast(545);
}
for (int i = 0; i < numDocs; i++) {
Document document = new Document();
document.add(newTextField("english", English.intToEnglish(i), Field.Store.NO));
@ -68,11 +74,13 @@ public class TestDoubleValuesSource extends LuceneTestCase {
searcher = newSearcher(reader);
}
@Override
public void tearDown() throws Exception {
@AfterClass
public static void afterClass() throws Exception {
reader.close();
dir.close();
super.tearDown();
searcher = null;
reader = null;
dir = null;
}
public void testSortMissingZeroDefault() throws Exception {

View File

@ -33,16 +33,18 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestMultiThreadTermVectors extends LuceneTestCase {
private Directory directory;
public int numDocs = 100;
public int numThreads = 3;
private int numDocs;
private int numThreads;
private int numIterations;
@Override
public void setUp() throws Exception {
super.setUp();
numDocs = TEST_NIGHTLY ? 1000 : 50;
numThreads = TEST_NIGHTLY ? 3 : 2;
numIterations = TEST_NIGHTLY ? 100 : 50;
directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
//writer.setNoCFSRatio(0.0);
//writer.infoStream = System.out;
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setTokenized(false);
customType.setStoreTermVectors(true);
@ -53,7 +55,6 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
writer.addDocument(doc);
}
writer.close();
}
@Override
@ -62,29 +63,9 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
super.tearDown();
}
public void test() throws Exception {
IndexReader reader = null;
try {
reader = DirectoryReader.open(directory);
for(int i = 1; i <= numThreads; i++)
testTermPositionVectors(reader, i);
}
catch (IOException ioe) {
fail(ioe.getMessage());
}
finally {
if (reader != null) {
try {
/** close the opened reader */
reader.close();
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
public void test() throws Exception {
try (IndexReader reader = DirectoryReader.open(directory)) {
testTermPositionVectors(reader, numThreads);
}
}
@ -94,91 +75,17 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
mtr[i] = new MultiThreadTermVectorsReader();
mtr[i].init(reader);
}
/** run until all threads finished */
int threadsAlive = mtr.length;
while (threadsAlive > 0) {
//System.out.println("Threads alive");
Thread.sleep(10);
threadsAlive = mtr.length;
for (int i = 0; i < mtr.length; i++) {
if (mtr[i].isAlive() == true) {
break;
}
threadsAlive--;
}
}
long totalTime = 0L;
for (int i = 0; i < mtr.length; i++) {
totalTime += mtr[i].timeElapsed;
mtr[i] = null;
}
//System.out.println("threadcount: " + mtr.length + " average term vector time: " + totalTime/mtr.length);
}
}
class MultiThreadTermVectorsReader implements Runnable {
private IndexReader reader = null;
private Thread t = null;
private final int runsToDo = 100;
long timeElapsed = 0;
public void init(IndexReader reader) {
this.reader = reader;
timeElapsed = 0;
t=new Thread(this);
t.start();
}
public boolean isAlive() {
if (t == null) return false;
return t.isAlive();
}
@Override
public void run() {
try {
// run the test 100 times
for (int i = 0; i < runsToDo; i++)
testTermVectors();
}
catch (Exception e) {
e.printStackTrace();
}
return;
}
private void testTermVectors() throws Exception {
// check:
int numDocs = reader.numDocs();
long start = 0L;
for (int docId = 0; docId < numDocs; docId++) {
start = System.currentTimeMillis();
Fields vectors = reader.getTermVectors(docId);
timeElapsed += System.currentTimeMillis()-start;
// verify vectors result
verifyVectors(vectors, docId);
start = System.currentTimeMillis();
Terms vector = reader.getTermVectors(docId).terms("field");
timeElapsed += System.currentTimeMillis()-start;
verifyVector(vector.iterator(), docId);
for (MultiThreadTermVectorsReader vectorReader : mtr) {
vectorReader.start();
}
for (MultiThreadTermVectorsReader vectorReader : mtr) {
vectorReader.join();
}
}
private void verifyVectors(Fields vectors, int num) throws IOException {
static void verifyVectors(Fields vectors, int num) throws IOException {
for (String field : vectors) {
Terms terms = vectors.terms(field);
assert terms != null;
@ -186,12 +93,42 @@ class MultiThreadTermVectorsReader implements Runnable {
}
}
private void verifyVector(TermsEnum vector, int num) throws IOException {
static void verifyVector(TermsEnum vector, int num) throws IOException {
StringBuilder temp = new StringBuilder();
while(vector.next() != null) {
temp.append(vector.term().utf8ToString());
}
if (!English.intToEnglish(num).trim().equals(temp.toString().trim()))
System.out.println("wrong term result");
assertEquals(English.intToEnglish(num).trim(), temp.toString().trim());
}
class MultiThreadTermVectorsReader extends Thread {
private IndexReader reader = null;
public void init(IndexReader reader) {
this.reader = reader;
}
@Override
public void run() {
try {
for (int i = 0; i < numIterations; i++) {
testTermVectors();
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private void testTermVectors() throws Exception {
// check:
int numDocs = reader.numDocs();
for (int docId = 0; docId < numDocs; docId++) {
Fields vectors = reader.getTermVectors(docId);
// verify vectors result
verifyVectors(vectors, docId);
Terms vector = reader.getTermVectors(docId).terms("field");
verifyVector(vector.iterator(), docId);
}
}
}
}

View File

@ -93,7 +93,7 @@ public class TestPrefixQuery extends LuceneTestCase {
Directory dir = newDirectory();
RandomIndexWriter w = new RandomIndexWriter(random(), dir);
int numTerms = atLeast(10000);
int numTerms = atLeast(1000);
Set<BytesRef> terms = new HashSet<>();
while (terms.size() < numTerms) {
byte[] bytes = new byte[TestUtil.nextInt(random(), 1, 10)];

View File

@ -83,7 +83,7 @@ public class TestSameScoresWithThreads extends LuceneTestCase {
if (!answers.isEmpty()) {
final CountDownLatch startingGun = new CountDownLatch(1);
int numThreads = TestUtil.nextInt(random(), 2, 5);
int numThreads = TEST_NIGHTLY ? TestUtil.nextInt(random(), 2, 5) : 2;
Thread[] threads = new Thread[numThreads];
for(int threadID=0;threadID<numThreads;threadID++) {
Thread thread = new Thread() {

View File

@ -352,8 +352,9 @@ public class TestScorerPerf extends LuceneTestCase {
FixedBitSet[] sets = randBitSets(atLeast(1000), atLeast(10));
doConjunctions(s, sets, atLeast(10000), atLeast(5));
doNestedConjunctions(s, sets, atLeast(10000), atLeast(3), atLeast(3));
int iterations = TEST_NIGHTLY ? atLeast(10000) : atLeast(500);
doConjunctions(s, sets, iterations, atLeast(5));
doNestedConjunctions(s, sets, iterations, atLeast(3), atLeast(3));
}
}
}

View File

@ -27,34 +27,22 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@SuppressCodecs({ "SimpleText", "Direct" })
public class TestSearchWithThreads extends LuceneTestCase {
int NUM_DOCS;
static final int NUM_SEARCH_THREADS = 5;
int RUN_TIME_MSEC;
@Override
public void setUp() throws Exception {
super.setUp();
NUM_DOCS = atLeast(10000);
RUN_TIME_MSEC = atLeast(1000);
}
public void test() throws Exception {
final int numThreads = TEST_NIGHTLY ? 5 : 2;
final int numSearches = TEST_NIGHTLY ? atLeast(2000) : atLeast(500);
final int numDocs = TEST_NIGHTLY ? atLeast(10000) : atLeast(200);
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
final long startTime = System.currentTimeMillis();
// TODO: replace w/ the @nightly test data; make this
// into an optional @nightly stress test
final Document doc = new Document();
final Field body = newTextField("body", "", Field.Store.NO);
doc.add(body);
final StringBuilder sb = new StringBuilder();
for(int docCount=0;docCount<NUM_DOCS;docCount++) {
for(int docCount=0;docCount<numDocs;docCount++) {
final int numTerms = random().nextInt(10);
for(int termCount=0;termCount<numTerms;termCount++) {
sb.append(random().nextBoolean() ? "aaa" : "bbb");
@ -67,16 +55,13 @@ public class TestSearchWithThreads extends LuceneTestCase {
final IndexReader r = w.getReader();
w.close();
final long endTime = System.currentTimeMillis();
if (VERBOSE) System.out.println("BUILD took " + (endTime-startTime));
final IndexSearcher s = newSearcher(r);
final AtomicBoolean failed = new AtomicBoolean();
final AtomicLong netSearch = new AtomicLong();
Thread[] threads = new Thread[NUM_SEARCH_THREADS];
for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++) {
Thread[] threads = new Thread[numThreads];
for (int threadID = 0; threadID < numThreads; threadID++) {
threads[threadID] = new Thread() {
TotalHitCountCollector col = new TotalHitCountCollector();
@Override
@ -84,13 +69,11 @@ public class TestSearchWithThreads extends LuceneTestCase {
try {
long totHits = 0;
long totSearch = 0;
long stopAt = System.currentTimeMillis() + RUN_TIME_MSEC;
while(System.currentTimeMillis() < stopAt && !failed.get()) {
for (; totSearch < numSearches & !failed.get(); totSearch++) {
s.search(new TermQuery(new Term("body", "aaa")), col);
totHits += col.getTotalHits();
s.search(new TermQuery(new Term("body", "bbb")), col);
totHits += col.getTotalHits();
totSearch++;
}
assertTrue(totSearch > 0 && totHits > 0);
netSearch.addAndGet(totSearch);
@ -111,7 +94,7 @@ public class TestSearchWithThreads extends LuceneTestCase {
t.join();
}
if (VERBOSE) System.out.println(NUM_SEARCH_THREADS + " threads did " + netSearch.get() + " searches");
if (VERBOSE) System.out.println(numThreads + " threads did " + netSearch.get() + " searches");
r.close();
dir.close();

View File

@ -30,6 +30,7 @@ import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
@ -44,7 +45,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.ThreadedIndexingAndSearchingTestCase;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LineFileDocs;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NamedThreadFactory;
@ -551,7 +551,6 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
dir.close();
}
@Slow
public void testConcurrentIndexCloseSearchAndRefresh() throws Exception {
final Directory dir = newFSDirectory(createTempDir());
AtomicReference<IndexWriter> writerRef = new AtomicReference<>();
@ -567,12 +566,14 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
@Override
public void run() {
try {
LineFileDocs docs = new LineFileDocs(random());
long runTimeSec = TEST_NIGHTLY ? atLeast(10) : 1;
long endTime = System.nanoTime() + runTimeSec * 1000000000;
while (System.nanoTime() < endTime) {
int numDocs = TEST_NIGHTLY ? atLeast(20000) : atLeast(200);
for (int i = 0; i < numDocs; i++) {
IndexWriter w = writerRef.get();
w.addDocument(docs.nextDoc());
Document doc = new Document();
doc.add(newTextField("field",
TestUtil.randomAnalysisString(random(), 256, false),
Field.Store.YES));
w.addDocument(doc);
if (random().nextInt(1000) == 17) {
if (random().nextBoolean()) {
w.close();
@ -582,7 +583,6 @@ public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
writerRef.set(new IndexWriter(dir, newIndexWriterConfig(analyzer)));
}
}
docs.close();
if (VERBOSE) {
System.out.println("TEST: index count=" + writerRef.get().getDocStats().maxDoc);
}

View File

@ -152,7 +152,8 @@ public class TestMultiMMap extends BaseDirectoryTestCase {
}
public void testSeekZero() throws Exception {
for (int i = 0; i < 31; i++) {
int upto = TEST_NIGHTLY ? 31 : 3;
for (int i = 0; i < upto; i++) {
MMapDirectory mmapDir = new MMapDirectory(createTempDir("testSeekZero"), 1<<i);
IndexOutput io = mmapDir.createOutput("zeroBytes", newIOContext(random()));
io.close();
@ -164,7 +165,8 @@ public class TestMultiMMap extends BaseDirectoryTestCase {
}
public void testSeekSliceZero() throws Exception {
for (int i = 0; i < 31; i++) {
int upto = TEST_NIGHTLY ? 31 : 3;
    for (int i = 0; i < upto; i++) {
MMapDirectory mmapDir = new MMapDirectory(createTempDir("testSeekSliceZero"), 1<<i);
IndexOutput io = mmapDir.createOutput("zeroBytes", newIOContext(random()));
io.close();
@ -269,7 +271,8 @@ public class TestMultiMMap extends BaseDirectoryTestCase {
}
public void testSliceOfSlice() throws Exception {
for (int i = 0; i < 10; i++) {
int upto = TEST_NIGHTLY ? 10 : 8;
for (int i = 0; i < upto; i++) {
MMapDirectory mmapDir = new MMapDirectory(createTempDir("testSliceOfSlice"), 1<<i);
IndexOutput io = mmapDir.createOutput("bytes", newIOContext(random()));
byte bytes[] = new byte[1<<(i+1)]; // make sure we switch buffers

View File

@ -20,7 +20,8 @@ package org.apache.lucene.util;
public class TestBitUtil extends LuceneTestCase {
public void testNextBitSet() {
for (int i = 0; i < 10000; i++) {
int numIterations = atLeast(1000);
for (int i = 0; i < numIterations; i++) {
long[] bits = buildRandomBits();
int numLong = bits.length - 1;
@ -46,7 +47,8 @@ public class TestBitUtil extends LuceneTestCase {
}
public void testPreviousBitSet() {
for (int i = 0; i < 10000; i++) {
int numIterations = atLeast(1000);
for (int i = 0; i < numIterations; i++) {
long[] bits = buildRandomBits();
int numLong = bits.length - 1;

View File

@ -265,7 +265,7 @@ public class TestNumericUtils extends LuceneTestCase {
}
public void testAdd() throws Exception {
int iters = atLeast(10000);
int iters = atLeast(1000);
int numBytes = TestUtil.nextInt(random(), 1, 100);
for(int iter=0;iter<iters;iter++) {
BigInteger v1 = new BigInteger(8*numBytes-1, random());
@ -301,7 +301,7 @@ public class TestNumericUtils extends LuceneTestCase {
}
public void testSubtract() throws Exception {
int iters = atLeast(10000);
int iters = atLeast(1000);
int numBytes = TestUtil.nextInt(random(), 1, 100);
for(int iter=0;iter<iters;iter++) {
BigInteger v1 = new BigInteger(8*numBytes-1, random());

View File

@ -130,7 +130,8 @@ public class TestSloppyMath extends LuceneTestCase {
/** Test this method sorts the same way as real haversin */
public void testHaversinSortKey() {
for (int i = 0; i < 100000; i++) {
int iters = atLeast(10000);
for (int i = 0; i < iters; i++) {
double centerLat = GeoTestUtil.nextLatitude();
double centerLon = GeoTestUtil.nextLongitude();

View File

@ -158,7 +158,8 @@ public class TestWeakIdentityMap extends LuceneTestCase {
public void testConcurrentHashMap() throws Exception {
// don't make threadCount and keyCount random, otherwise easily OOMs or fails otherwise:
final int threadCount = 8, keyCount = 1024;
final int threadCount = TEST_NIGHTLY ? 8 : 2;
final int keyCount = 1024;
final ExecutorService exec = Executors.newFixedThreadPool(threadCount, new NamedThreadFactory("testConcurrentHashMap"));
final WeakIdentityMap<Object,Integer> map =
WeakIdentityMap.newConcurrentHashMap(random().nextBoolean());

View File

@ -64,7 +64,7 @@ public class TestMinimize extends LuceneTestCase {
}
/** n^2 space usage in Hopcroft minimization? */
@Slow
@Nightly
public void testMinimizeHuge() {
new RegExp("+-*(A|.....|BC)*]", RegExp.NONE).toAutomaton(1000000);
}

View File

@ -1241,7 +1241,7 @@ public class RandomPostingsTester {
final boolean alwaysTestMax) throws Exception {
if (options.contains(Option.THREADS)) {
int numThreads = TestUtil.nextInt(random, 2, 5);
int numThreads = LuceneTestCase.TEST_NIGHTLY ? TestUtil.nextInt(random, 2, 5) : 2;
Thread[] threads = new Thread[numThreads];
for(int threadUpto=0;threadUpto<numThreads;threadUpto++) {
threads[threadUpto] = new TestThread(new Random(random.nextLong()), this, fieldsSource, options, maxTestOptions, maxIndexOptions, alwaysTestMax);

View File

@ -321,7 +321,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
}
protected void runSearchThreads(final long stopTimeMS) throws Exception {
final int numThreads = TestUtil.nextInt(random(), 1, 5);
final int numThreads = TEST_NIGHTLY ? TestUtil.nextInt(random(), 1, 5) : 2;
final Thread[] searchThreads = new Thread[numThreads];
final AtomicLong totHits = new AtomicLong();

View File

@ -326,6 +326,7 @@ public class CheckHits {
* @param deep indicates whether a deep comparison of sub-Explanation details should be executed
* @param expl The Explanation to match against score
*/
// TODO: speed up this method to not be so slow
public static void verifyExplanation(String q,
int doc,
float score,

View File

@ -359,7 +359,9 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
if (score != explanation.getValue().doubleValue()) {
fail("expected: " + score + ", got: " + explanation);
}
CheckHits.verifyExplanation("<test query>", 0, score, true, explanation);
if (rarely()) {
CheckHits.verifyExplanation("<test query>", 0, score, true, explanation);
}
// check score(freq-1), given the same norm it should be <= score(freq) [scores non-decreasing for more term occurrences]
final float prevFreq;
@ -380,7 +382,9 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
if (prevScore != prevExplanation.getValue().doubleValue()) {
fail("expected: " + prevScore + ", got: " + prevExplanation);
}
CheckHits.verifyExplanation("test query (prevFreq)", 0, prevScore, true, prevExplanation);
if (rarely()) {
CheckHits.verifyExplanation("test query (prevFreq)", 0, prevScore, true, prevExplanation);
}
if (prevScore > score) {
System.out.println(prevExplanation);
@ -399,8 +403,9 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
if (prevNormScore != prevNormExplanation.getValue().doubleValue()) {
fail("expected: " + prevNormScore + ", got: " + prevNormExplanation);
}
CheckHits.verifyExplanation("test query (prevNorm)", 0, prevNormScore, true, prevNormExplanation);
if (rarely()) {
CheckHits.verifyExplanation("test query (prevNorm)", 0, prevNormScore, true, prevNormExplanation);
}
if (prevNormScore < score) {
System.out.println(prevNormExplanation);
System.out.println(explanation);
@ -421,7 +426,9 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
if (prevTermScore != prevTermExplanation.getValue().doubleValue()) {
fail("expected: " + prevTermScore + ", got: " + prevTermExplanation);
}
CheckHits.verifyExplanation("test query (prevTerm)", 0, prevTermScore, true, prevTermExplanation);
if (rarely()) {
CheckHits.verifyExplanation("test query (prevTerm)", 0, prevTermScore, true, prevTermExplanation);
}
if (prevTermScore < score) {
System.out.println(prevTermExplanation);