switch over more tests to atLeast()

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1133616 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2011-06-09 00:37:02 +00:00
parent ae2239e134
commit db1d3c2274
53 changed files with 128 additions and 105 deletions
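For context: atLeast(n) is the LuceneTestCase helper these tests are being switched to. Instead of hand-rolling "n * RANDOM_MULTIPLIER" and TEST_NIGHTLY arithmetic at every call site, a test writes atLeast(n) and gets back a randomized iteration count that is never below n, scaled up by the test multiplier and further for nightly runs. The sketch below shows the assumed shape of such a helper; the class name, system property names, and exact scaling are illustrative assumptions, not the actual LuceneTestCase implementation at this revision.

import java.util.Random;

// Minimal sketch of an atLeast()-style helper (assumed shape, not the real LuceneTestCase code).
class AtLeastSketch {
  // Stand-ins for the test framework's scaling knobs.
  static final int RANDOM_MULTIPLIER = Integer.parseInt(System.getProperty("tests.multiplier", "1"));
  static final boolean TEST_NIGHTLY = Boolean.parseBoolean(System.getProperty("tests.nightly", "false"));
  static final Random random = new Random();

  // Returns a randomized count that is always >= i, scaled by the multiplier
  // and bumped further for nightly runs, so "int num = atLeast(100);" replaces
  // expressions like "100 * RANDOM_MULTIPLIER" or "(TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER".
  static int atLeast(int i) {
    int min = (TEST_NIGHTLY ? 2 * i : i) * RANDOM_MULTIPLIER;
    int max = min + (min / 2);                  // allow up to 1.5x the minimum
    return min + random.nextInt(max - min + 1); // uniform in [min, max]
  }
}

With a helper like this, the diffs below read as a mechanical substitution: fixed "N * RANDOM_MULTIPLIER" counts become atLeast(N), TEST_NIGHTLY ternaries collapse to atLeast(smallValue), and per-second timing constants such as RUN_TIME_SEC become millisecond values obtained from atLeast().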

@@ -47,7 +47,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
 private static abstract class TimedThread extends Thread {
 volatile boolean failed;
 int count;
-private static float RUN_TIME_SEC = 0.5f * RANDOM_MULTIPLIER;
+private static float RUN_TIME_MSEC = atLeast(500);
 private TimedThread[] allThreads;
 abstract public void doWork() throws Throwable;
@@ -58,7 +58,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
 @Override
 public void run() {
-final long stopTime = System.currentTimeMillis() + (long) (1000*RUN_TIME_SEC);
+final long stopTime = System.currentTimeMillis() + (long) RUN_TIME_MSEC;
 count = 0;

@@ -516,7 +516,7 @@ public class TestFieldsReader extends LuceneTestCase {
 public void testNumericField() throws Exception {
 Directory dir = newDirectory();
 RandomIndexWriter w = new RandomIndexWriter(random, dir);
-final int numDocs = _TestUtil.nextInt(random, 500, 1000) * RANDOM_MULTIPLIER;
+final int numDocs = atLeast(500);
 final Number[] answers = new Number[numDocs];
 final NumericField.DataType[] typeAnswers = new NumericField.DataType[numDocs];
 for(int id=0;id<numDocs;id++) {

@@ -687,7 +687,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
 public void testThreadSafety() throws Exception {
 final Directory dir = newDirectory();
-final int n = 30 * RANDOM_MULTIPLIER;
+final int n = atLeast(30);
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 for (int i = 0; i < n; i++) {

@@ -1524,7 +1524,7 @@ public class TestIndexWriter extends LuceneTestCase {
 if (VERBOSE) {
 w.w.setInfoStream(System.out);
 }
-final int docCount = 200*RANDOM_MULTIPLIER;
+final int docCount = atLeast(200);
 final int fieldCount = _TestUtil.nextInt(rand, 1, 5);
 final List<Integer> fieldIDs = new ArrayList<Integer>();
@@ -1589,7 +1589,8 @@ public class TestIndexWriter extends LuceneTestCase {
 System.out.println("TEST: cycle x=" + x + " r=" + r);
 }
-for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(1000);
+for(int iter=0;iter<num;iter++) {
 String testID = idsList[rand.nextInt(idsList.length)];
 if (VERBOSE) {
 System.out.println("TEST: test id=" + testID);

@@ -874,7 +874,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testDeleteAllSlowly() throws Exception {
 final Directory dir = newDirectory();
 RandomIndexWriter w = new RandomIndexWriter(random, dir);
-final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+final int NUM_DOCS = atLeast(1000);
 final List<Integer> ids = new ArrayList<Integer>(NUM_DOCS);
 for(int id=0;id<NUM_DOCS;id++) {
 ids.add(id);
@@ -917,7 +917,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 w.setInfoStream(VERBOSE ? System.out : null);
 Document doc = new Document();
 doc.add(newField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO, Field.Index.ANALYZED));
-int num = TEST_NIGHTLY ? 6 * RANDOM_MULTIPLIER : 3 * RANDOM_MULTIPLIER;
+int num = atLeast(3);
 for (int iter = 0; iter < num; iter++) {
 int count = 0;

@@ -1223,7 +1223,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 FailOnTermVectors[] failures = new FailOnTermVectors[] {
 new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE),
 new FailOnTermVectors(FailOnTermVectors.INIT_STAGE), };
-for (int j = 0; j < 3 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(3);
+for (int j = 0; j < num; j++) {
 for (FailOnTermVectors failure : failures) {
 MockDirectoryWrapper dir = newDirectory();
 IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(

@@ -658,7 +658,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
-int num = 100 * RANDOM_MULTIPLIER;
+int num = atLeast(100);
 for (int i = 0; i < num; i++) {
 writer.addDocument(createDocument(i, "test", 4));
 }

@@ -175,7 +175,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
 BytesRef utf8 = new BytesRef(20);
 CharsRef utf16 = new CharsRef(20);
-int num = 100000 * RANDOM_MULTIPLIER;
+int num = atLeast(100000);
 for (int iter = 0; iter < num; iter++) {
 boolean hasIllegal = fillUnicode(buffer, expected, 0, 20);
@@ -281,7 +281,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
 char[] chars = new char[2];
 final Set<String> allTerms = new HashSet<String>();
-int num = 200 * RANDOM_MULTIPLIER;
+int num = atLeast(200);
 for (int i = 0; i < num; i++) {
 final String s;

@@ -70,7 +70,7 @@ public class TestLongPostings extends LuceneTestCase {
 // randomness (ie same seed will point to same dir):
 Directory dir = newFSDirectory(_TestUtil.getTempDir("longpostings" + "." + random.nextLong()));
-final int NUM_DOCS = (int) ((TEST_NIGHTLY ? 4e6 : (RANDOM_MULTIPLIER*2e3)) * (1+random.nextDouble()));
+final int NUM_DOCS = atLeast(2000);
 if (VERBOSE) {
 System.out.println("TEST: NUM_DOCS=" + NUM_DOCS);
@@ -145,7 +145,8 @@ public class TestLongPostings extends LuceneTestCase {
 assertTrue(r.docFreq(new Term("field", s1)) > 0);
 assertTrue(r.docFreq(new Term("field", s2)) > 0);
-for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(1000);
+for(int iter=0;iter<num;iter++) {
 final String term;
 final boolean doS1;

@@ -27,7 +27,7 @@ public class TestMultiFields extends LuceneTestCase {
 public void testRandom() throws Exception {
-int num = 2 * RANDOM_MULTIPLIER;
+int num = atLeast(2);
 for (int iter = 0; iter < num; iter++) {
 Directory dir = newDirectory();

@@ -143,7 +143,7 @@ public class TestNorms extends LuceneTestCase {
 }
 private void doTestNorms(Random random, Directory dir) throws IOException {
-int num = (TEST_NIGHTLY ? 5 : 1) * RANDOM_MULTIPLIER;
+int num = atLeast(1);
 for (int i=0; i<num; i++) {
 addDocs(random, dir,12,true);
 verifyIndex(dir);

@@ -474,7 +474,7 @@ public class TestPayloads extends LuceneTestCase {
 public void testThreadSafety() throws Exception {
 final int numThreads = 5;
-final int numDocs = 50 * RANDOM_MULTIPLIER;
+final int numDocs = atLeast(50);
 final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
 Directory dir = newDirectory();

@@ -274,7 +274,7 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
 Index[] indexValue = new Index[] { Index.ANALYZED, Index.ANALYZED_NO_NORMS,
 Index.NOT_ANALYZED, Index.NOT_ANALYZED_NO_NORMS };
 final int docsPerRound = 97;
-int numRounds = (TEST_NIGHTLY ? 5 : 1 ) * RANDOM_MULTIPLIER;
+int numRounds = atLeast(1);
 for (int i = 0; i < numRounds; i++) {
 CodecProvider provider = new CodecProvider();
 Codec[] codecs = new Codec[] { new StandardCodec(),
@@ -285,7 +285,8 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
 for (Codec codec : codecs) {
 provider.register(codec);
 }
-for (int j = 0; j < 30 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(30);
+for (int j = 0; j < num; j++) {
 provider.setFieldCodec("" + j, codecs[random.nextInt(codecs.length)].name);
 }
 IndexWriterConfig config = newIndexWriterConfig(random,
@@ -295,7 +296,8 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
 IndexWriter writer = newWriter(dir, config);
 for (int j = 0; j < docsPerRound; j++) {
 final Document doc = new Document();
-for (int k = 0; k < 30 * RANDOM_MULTIPLIER; k++) {
+num = atLeast(30);
+for (int k = 0; k < num; k++) {
 Field field = newField("" + k, _TestUtil
 .randomRealisticUnicodeString(random, 128), indexValue[random
 .nextInt(indexValue.length)]);

@@ -36,7 +36,7 @@ public class TestRollingUpdates extends LuceneTestCase {
 final LineFileDocs docs = new LineFileDocs(random);
 final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
-final int SIZE = (TEST_NIGHTLY ? 200 : 20) * RANDOM_MULTIPLIER;
+final int SIZE = atLeast(20);
 int id = 0;
 IndexReader r = null;
 final int numUpdates = (int) (SIZE * (2+random.nextDouble()));
@@ -82,7 +82,7 @@ public class TestRollingUpdates extends LuceneTestCase {
 for (int r = 0; r < 3; r++) {
 final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
 TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
-final int numUpdates = (TEST_NIGHTLY ? 200 : 20) * RANDOM_MULTIPLIER;
+final int numUpdates = atLeast(20);
 int numThreads = _TestUtil.nextInt(random, 2, 6);
 IndexingThread[] threads = new IndexingThread[numThreads];
 for (int i = 0; i < numThreads; i++) {

@@ -40,7 +40,8 @@ public class TestStressAdvance extends LuceneTestCase {
 doc.add(f);
 final Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS);
 doc.add(idField);
-for(int id=0;id<5000*RANDOM_MULTIPLIER;id++) {
+int num = atLeast(5000);
+for(int id=0;id<num;id++) {
 if (random.nextInt(4) == 3) {
 f.setValue("a");
 aDocs.add(id);

@@ -27,7 +27,7 @@ public class TestStressIndexing extends LuceneTestCase {
 private static abstract class TimedThread extends Thread {
 volatile boolean failed;
 int count;
-private static int RUN_TIME_SEC = 1 * RANDOM_MULTIPLIER;
+private static int RUN_TIME_MSEC = atLeast(1000);
 private TimedThread[] allThreads;
 abstract public void doWork() throws Throwable;
@@ -38,7 +38,7 @@ public class TestStressIndexing extends LuceneTestCase {
 @Override
 public void run() {
-final long stopTime = System.currentTimeMillis() + 1000*RUN_TIME_SEC;
+final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC;
 count = 0;

@@ -93,7 +93,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
 public void testMultiConfig() throws Throwable {
 // test lots of smaller different params together
-int num = 3 * RANDOM_MULTIPLIER;
+int num = atLeast(3);
 for (int i = 0; i < num; i++) { // increase iterations for better testing
 if (VERBOSE) {
 System.out.println("\n\nTEST: top iter=" + i);

@@ -66,7 +66,8 @@ public class TestTieredMergePolicy extends LuceneTestCase {
 }
 public void testPartialOptimize() throws Exception {
-for(int iter=0;iter<10*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(10);
+for(int iter=0;iter<num;iter++) {
 if (VERBOSE) {
 System.out.println("TEST: iter=" + iter);
 }

@@ -42,7 +42,7 @@ public class TestTransactions extends LuceneTestCase {
 private static abstract class TimedThread extends Thread {
 volatile boolean failed;
-private static float RUN_TIME_SEC = 0.5f * RANDOM_MULTIPLIER;
+private static float RUN_TIME_MSEC = atLeast(500);
 private TimedThread[] allThreads;
 abstract public void doWork() throws Throwable;
@@ -53,7 +53,7 @@ public class TestTransactions extends LuceneTestCase {
 @Override
 public void run() {
-final long stopTime = System.currentTimeMillis() + (long) (1000*RUN_TIME_SEC);
+final long stopTime = System.currentTimeMillis() + (long) (RUN_TIME_MSEC);
 try {
 do {

@@ -134,7 +134,7 @@ public class TestSurrogates extends LuceneTestCase {
 System.out.println("\nTEST: top now seek");
 }
-int num = 100 * RANDOM_MULTIPLIER;
+int num = atLeast(100);
 for (int iter = 0; iter < num; iter++) {
 // pick random field+term
@@ -197,7 +197,7 @@ public class TestSurrogates extends LuceneTestCase {
 }
 {
-int num = 100 * RANDOM_MULTIPLIER;
+int num = atLeast(100);
 for (int iter = 0; iter < num; iter++) {
 // seek to random spot
@@ -287,7 +287,7 @@ public class TestSurrogates extends LuceneTestCase {
 for(int f=0;f<numField;f++) {
 String field = "f" + f;
-final int numTerms = (TEST_NIGHTLY ? 10000 : 1000) * RANDOM_MULTIPLIER;
+final int numTerms = atLeast(1000);
 final Set<String> uniqueTerms = new HashSet<String>();

@@ -67,7 +67,7 @@ public class BaseTestRangeFilter extends LuceneTestCase {
 static TestIndex unsignedIndexDir;
 static int minId = 0;
-static int maxId = TEST_NIGHTLY ? 10000 : 500;
+static int maxId = atLeast(500);
 static final int intLength = Integer.toString(Integer.MAX_VALUE).length();

@@ -232,7 +232,7 @@ public class TestBoolean2 extends LuceneTestCase {
 try {
 // increase number of iterations for more complete testing
-int num = (TEST_NIGHTLY ? 50 : 10) * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i=0; i<num; i++) {
 int level = random.nextInt(3);
 q1 = randBoolQuery(new Random(random.nextLong()), random.nextBoolean(), level, field, vals, null);

@@ -314,7 +314,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
 // increase number of iterations for more complete testing
-int num = (TEST_NIGHTLY ? 50 : 10) * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i=0; i<num; i++) {
 int lev = random.nextInt(maxLev);
 final long seed = random.nextLong();

@@ -39,7 +39,7 @@ public class TestCustomSearcherSort extends LuceneTestCase {
 private IndexReader reader;
 private Query query = null;
 // reduced from 20000 to 2000 to speed up test...
-private final static int INDEX_SIZE = 2000 * RANDOM_MULTIPLIER;
+private final static int INDEX_SIZE = atLeast(2000);
 /**
 * Create index and query for test cases.

@@ -33,7 +33,7 @@ import java.io.PrintStream;
 public class TestFieldCache extends LuceneTestCase {
 protected IndexReader reader;
-private static final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;
+private static final int NUM_DOCS = atLeast(1000);
 private String[] unicodeStrings;
 private Directory directory;
@@ -185,7 +185,8 @@ public class TestFieldCache extends LuceneTestCase {
 }
 // seek the enum around (note this isn't a great test here)
-for (int i = 0; i < 100 * RANDOM_MULTIPLIER; i++) {
+int num = atLeast(100);
+for (int i = 0; i < num; i++) {
 int k = _TestUtil.nextInt(random, 1, nTerms-1);
 BytesRef val1 = termsIndex.lookup(k, val);
 assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seek(val1));

@@ -46,7 +46,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
 DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.US));
-int num = (TEST_NIGHTLY ? 5000 : 500) * RANDOM_MULTIPLIER;
+int num = atLeast(500);
 for (int l = 0; l < num; l++) {
 Document doc = new Document();
 for (int m=0, c=random.nextInt(10); m<=c; m++) {
@@ -60,7 +60,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
 writer.close();
 IndexSearcher searcher=newSearcher(reader);
-num = 50 * RANDOM_MULTIPLIER;
+num = atLeast(50);
 for (int i = 0; i < num; i++) {
 int lower=random.nextInt(Integer.MAX_VALUE);
 int upper=random.nextInt(Integer.MAX_VALUE);

@@ -44,7 +44,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
 // shift the starting of the values to the left, to also have negative values:
 private static final int startOffset = - 1 << 15;
 // number of docs to generate for testing
-private static final int noDocs = (TEST_NIGHTLY ? 10000 : 5000) * RANDOM_MULTIPLIER;
+private static final int noDocs = atLeast(5000);
 private static Directory directory = null;
 private static IndexReader reader = null;
@@ -336,7 +336,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
 private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
 String field="field"+precisionStep;
 int termCountT=0,termCountC=0;
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 int lower=(int)(random.nextDouble()*noDocs*distance)+startOffset;
 int upper=(int)(random.nextDouble()*noDocs*distance)+startOffset;
@@ -414,7 +414,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
 private void testRangeSplit(int precisionStep) throws Exception {
 String field="ascfield"+precisionStep;
 // 10 random tests
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i =0; i< num; i++) {
 int lower=(int)(random.nextDouble()*noDocs - noDocs/2);
 int upper=(int)(random.nextDouble()*noDocs - noDocs/2);
@@ -490,7 +490,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
 String field="field"+precisionStep;
 // 10 random tests, the index order is ascending,
 // so using a reverse sort field should retun descending documents
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 int lower=(int)(random.nextDouble()*noDocs*distance)+startOffset;
 int upper=(int)(random.nextDouble()*noDocs*distance)+startOffset;

@@ -41,7 +41,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
 // shift the starting of the values to the left, to also have negative values:
 private static final long startOffset = - 1L << 31;
 // number of docs to generate for testing
-private static final int noDocs = (TEST_NIGHTLY ? 10000 : 5000) * RANDOM_MULTIPLIER;
+private static final int noDocs = atLeast(5000);
 private static Directory directory = null;
 private static IndexReader reader = null;
@@ -353,7 +353,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
 private void testRandomTrieAndClassicRangeQuery(int precisionStep) throws Exception {
 String field="field"+precisionStep;
 int termCountT=0,termCountC=0;
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 long lower=(long)(random.nextDouble()*noDocs*distance)+startOffset;
 long upper=(long)(random.nextDouble()*noDocs*distance)+startOffset;
@@ -436,7 +436,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
 private void testRangeSplit(int precisionStep) throws Exception {
 String field="ascfield"+precisionStep;
 // 10 random tests
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 long lower=(long)(random.nextDouble()*noDocs - noDocs/2);
 long upper=(long)(random.nextDouble()*noDocs - noDocs/2);
@@ -522,7 +522,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
 String field="field"+precisionStep;
 // 10 random tests, the index order is ascending,
 // so using a reverse sort field should retun descending documents
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 long lower=(long)(random.nextDouble()*noDocs*distance)+startOffset;
 long upper=(long)(random.nextDouble()*noDocs*distance)+startOffset;

@@ -59,7 +59,7 @@ public class TestPrefixRandom extends LuceneTestCase {
 // we generate aweful prefixes: good for testing.
 // but for preflex codec, the test can be very slow, so use less iterations.
 final String codec = CodecProvider.getDefault().getFieldCodec("field");
-int num = codec.equals("PreFlex") ? 200 * RANDOM_MULTIPLIER : 2000 * RANDOM_MULTIPLIER;
+int num = codec.equals("PreFlex") ? 200 * RANDOM_MULTIPLIER : atLeast(2000);
 for (int i = 0; i < num; i++) {
 field.setValue(_TestUtil.randomUnicodeString(random, 10));
 writer.addDocument(doc);
@@ -114,7 +114,7 @@ public class TestPrefixRandom extends LuceneTestCase {
 /** test a bunch of random prefixes */
 public void testPrefixes() throws Exception {
-int num = 1000 * RANDOM_MULTIPLIER;
+int num = atLeast(1000);
 for (int i = 0; i < num; i++)
 assertSame(_TestUtil.randomUnicodeString(random, 5));
 }

@@ -98,7 +98,7 @@ public class TestRegexpRandom extends LuceneTestCase {
 }
 public void testRegexps() throws Exception {
-int num = (TEST_NIGHTLY ? 100 : 1) * RANDOM_MULTIPLIER;
+int num = atLeast(1);
 for (int i = 0; i < num; i++) {
 assertPatternHits("NNN", 1);
 assertPatternHits(".NN", 10);
@@ -106,7 +106,6 @@ public class TestRegexpRandom extends LuceneTestCase {
 assertPatternHits("NN.", 10);
 }
-num = (TEST_NIGHTLY ? 10 : 1) * RANDOM_MULTIPLIER;
 for (int i = 0; i < num; i++) {
 assertPatternHits(".{1,2}N", 100);
 assertPatternHits("N.{1,2}", 100);

@@ -140,7 +140,7 @@ public class TestRegexpRandom2 extends LuceneTestCase {
 public void testRegexps() throws Exception {
 // we generate aweful regexps: good for testing.
 // but for preflex codec, the test can be very slow, so use less iterations.
-int num = CodecProvider.getDefault().getFieldCodec("field").equals("PreFlex") ? 100 * RANDOM_MULTIPLIER : 1000 * RANDOM_MULTIPLIER;
+int num = CodecProvider.getDefault().getFieldCodec("field").equals("PreFlex") ? 100 * RANDOM_MULTIPLIER : atLeast(1000);
 for (int i = 0; i < num; i++) {
 String reg = AutomatonTestUtil.randomRegexp(random);
 assertSame(reg);

@@ -314,9 +314,9 @@ public class TestScorerPerf extends LuceneTestCase {
 // test many small sets... the bugs will be found on boundary conditions
 createDummySearcher();
 validate=true;
-sets=randBitSets(1000 * RANDOM_MULTIPLIER, 10 * RANDOM_MULTIPLIER);
-doConjunctions(10000 * RANDOM_MULTIPLIER, 5 * RANDOM_MULTIPLIER);
-doNestedConjunctions(10000 * RANDOM_MULTIPLIER, 3 * RANDOM_MULTIPLIER, 3 * RANDOM_MULTIPLIER);
+sets=randBitSets(atLeast(1000), atLeast(10));
+doConjunctions(atLeast(10000), atLeast(5));
+doNestedConjunctions(atLeast(10000), atLeast(3), atLeast(3));
 s.close();
 d.close();
 }

@@ -31,9 +31,9 @@ import org.apache.lucene.util.LuceneTestCase;
 public class TestSearchWithThreads extends LuceneTestCase {
-final int NUM_DOCS = 10000;
+final int NUM_DOCS = atLeast(10000);
 final int NUM_SEARCH_THREADS = 5;
-final int RUN_TIME_MSEC = 1000 * RANDOM_MULTIPLIER;
+final int RUN_TIME_MSEC = atLeast(1000);
 public void test() throws Exception {
 final Directory dir = newDirectory();
@@ -47,7 +47,7 @@ public class TestSearchWithThreads extends LuceneTestCase {
 final Field body = newField("body", "", Field.Index.ANALYZED);
 doc.add(body);
 final StringBuilder sb = new StringBuilder();
-for(int docCount=0;docCount<NUM_DOCS*RANDOM_MULTIPLIER;docCount++) {
+for(int docCount=0;docCount<NUM_DOCS;docCount++) {
 final int numTerms = random.nextInt(10);
 for(int termCount=0;termCount<numTerms;termCount++) {
 sb.append(random.nextBoolean() ? "aaa" : "bbb");

@@ -62,7 +62,7 @@ import org.apache.lucene.util._TestUtil;
 public class TestSort extends LuceneTestCase {
-private static final int NUM_STRINGS = 6000 * RANDOM_MULTIPLIER;
+private static final int NUM_STRINGS = atLeast(6000);
 private IndexSearcher full;
 private IndexSearcher searchX;
 private IndexSearcher searchY;

@@ -43,7 +43,8 @@ public class TestSubScorerFreqs extends LuceneTestCase {
 RandomIndexWriter w = new RandomIndexWriter(
 random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 // make sure we have more than one segment occationally
-for (int i = 0; i < 31 * RANDOM_MULTIPLIER; i++) {
+int num = atLeast(31);
+for (int i = 0; i < num; i++) {
 Document doc = new Document();
 doc.add(newField("f", "a b c d b c d c d d", Field.Store.NO,
 Field.Index.ANALYZED));

@@ -142,7 +142,7 @@ public class TestThreadSafe extends LuceneTestCase {
 buildDir(dir1, 15, 5, 2000);
 // do many small tests so the thread locals go away inbetween
-int num = 10 * RANDOM_MULTIPLIER;
+int num = atLeast(10);
 for (int i = 0; i < num; i++) {
 ir1 = IndexReader.open(dir1, false);
 doTest(10,10);

@@ -99,7 +99,7 @@ public class TestWildcardRandom extends LuceneTestCase {
 }
 public void testWildcards() throws Exception {;
-int num = (TEST_NIGHTLY ? 100 : 1) * RANDOM_MULTIPLIER;
+int num = atLeast(1);
 for (int i = 0; i < num; i++) {
 assertPatternHits("NNN", 1);
 assertPatternHits("?NN", 10);
@@ -107,7 +107,6 @@ public class TestWildcardRandom extends LuceneTestCase {
 assertPatternHits("NN?", 10);
 }
-num = (TEST_NIGHTLY ? 10 : 1) * RANDOM_MULTIPLIER;
 for (int i = 0; i < num; i++) {
 assertPatternHits("??N", 100);
 assertPatternHits("N??", 100);

@@ -37,7 +37,7 @@ import static org.hamcrest.CoreMatchers.*;
 public class TestEntryCreators extends LuceneTestCase {
 protected IndexReader reader;
-private static final int NUM_DOCS = 500 * RANDOM_MULTIPLIER;
+private static final int NUM_DOCS = atLeast(500);
 private Directory directory;
 static class NumberTypeTester {

@@ -32,7 +32,8 @@ public class TestCopyBytes extends LuceneTestCase {
 @Test
 public void testCopyBytes() throws Exception {
-for(int iter=0;iter<10*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(10);
+for(int iter=0;iter<num;iter++) {
 Directory dir = newDirectory();
 if (VERBOSE) {
 System.out.println("TEST: iter=" + iter + " dir=" + dir);

@@ -46,7 +46,8 @@ public class TestMultiMMap extends LuceneTestCase {
 }
 public void testRandomChunkSizes() throws Exception {
-for (int i = 0; i < 10*RANDOM_MULTIPLIER; i++)
+int num = atLeast(10);
+for (int i = 0; i < num; i++)
 assertChunking(random, _TestUtil.nextInt(random, 20, 100));
 }
@@ -75,7 +76,7 @@ public class TestMultiMMap extends LuceneTestCase {
 IndexReader reader = writer.getReader();
 writer.close();
-int numAsserts = 100*RANDOM_MULTIPLIER;
+int numAsserts = atLeast(100);
 for (int i = 0; i < numAsserts; i++) {
 int docID = random.nextInt(numDocs);
 assertEquals("" + docID, reader.document(docID).get("docid"));

@@ -80,7 +80,7 @@ public class TestWindowsMMap extends LuceneTestCase {
 writer.commit();
 IndexSearcher searcher = new IndexSearcher(dir, true);
-int num = 1000 * RANDOM_MULTIPLIER;
+int num = atLeast(1000);
 for(int dx = 0; dx < num; dx ++) {
 String f = randomField();
 Document doc = new Document();

@@ -49,7 +49,7 @@ public class TestArrayUtil extends LuceneTestCase {
 }
 public void testInvalidElementSizes() {
-int num = 10000 * RANDOM_MULTIPLIER;
+int num = atLeast(10000);
 for (int iter = 0; iter < num; iter++) {
 final int minTargetSize = random.nextInt(Integer.MAX_VALUE);
 final int elemSize = random.nextInt(11);
@@ -126,7 +126,7 @@ public class TestArrayUtil extends LuceneTestCase {
 }
 public void testQuickSort() {
-int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+int num = atLeast(50);
 for (int i = 0; i < num; i++) {
 Integer[] a1 = createRandomArray(1000), a2 = a1.clone();
 ArrayUtil.quickSort(a1);
@@ -155,7 +155,7 @@ public class TestArrayUtil extends LuceneTestCase {
 // This is a test for LUCENE-3054 (which fails without the merge sort fall back with stack overflow in most cases)
 public void testQuickToMergeSortFallback() {
-int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+int num = atLeast(50);
 for (int i = 0; i < num; i++) {
 Integer[] a1 = createSparseRandomArray(40000), a2 = a1.clone();
 ArrayUtil.quickSort(a1);
@@ -165,7 +165,7 @@ public class TestArrayUtil extends LuceneTestCase {
 }
 public void testMergeSort() {
-int num = (TEST_NIGHTLY ? 500 : 50) * RANDOM_MULTIPLIER;
+int num = atLeast(50);
 for (int i = 0; i < num; i++) {
 Integer[] a1 = createRandomArray(1000), a2 = a1.clone();
 ArrayUtil.mergeSort(a1);
@@ -185,7 +185,7 @@ public class TestArrayUtil extends LuceneTestCase {
 }
 public void testInsertionSort() {
-for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+for (int i = 0, c = atLeast(500); i < c; i++) {
 Integer[] a1 = createRandomArray(30), a2 = a1.clone();
 ArrayUtil.insertionSort(a1);
 Arrays.sort(a2);

@@ -65,7 +65,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 @Test
 public void testSize() {
 BytesRef ref = new BytesRef();
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 final int mod = 1+random.nextInt(39);
 for (int i = 0; i < 797; i++) {
 String str;
@@ -97,7 +98,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 public void testGet() {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 Map<String, Integer> strings = new HashMap<String, Integer>();
 int uniqueCount = 0;
 for (int i = 0; i < 797; i++) {
@@ -134,7 +136,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 @Test
 public void testCompact() {
 BytesRef ref = new BytesRef();
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 int numEntries = 0;
 final int size = 797;
 BitSet bits = new BitSet(size);
@@ -175,7 +178,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 @Test
 public void testSort() {
 BytesRef ref = new BytesRef();
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 SortedSet<String> strings = new TreeSet<String>();
 for (int i = 0; i < 797; i++) {
 String str;
@@ -212,7 +216,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 public void testAdd() {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 Set<String> strings = new HashSet<String>();
 int uniqueCount = 0;
 for (int i = 0; i < 797; i++) {
@@ -274,7 +279,8 @@ public class TestBytesRefHash extends LuceneTestCase {
 BytesRef ref = new BytesRef();
 BytesRef scratch = new BytesRef();
 BytesRefHash offsetHash = newHash(pool);
-for (int j = 0; j < 2 * RANDOM_MULTIPLIER; j++) {
+int num = atLeast(2);
+for (int j = 0; j < num; j++) {
 Set<String> strings = new HashSet<String>();
 int uniqueCount = 0;
 for (int i = 0; i < 797; i++) {

@@ -34,7 +34,7 @@ public class TestCollectionUtil extends LuceneTestCase {
 }
 public void testQuickSort() {
-for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+for (int i = 0, c = atLeast(500); i < c; i++) {
 List<Integer> list1 = createRandomList(1000), list2 = new ArrayList<Integer>(list1);
 CollectionUtil.quickSort(list1);
 Collections.sort(list2);
@@ -53,7 +53,7 @@ public class TestCollectionUtil extends LuceneTestCase {
 }
 public void testMergeSort() {
-for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+for (int i = 0, c = atLeast(500); i < c; i++) {
 List<Integer> list1 = createRandomList(1000), list2 = new ArrayList<Integer>(list1);
 CollectionUtil.mergeSort(list1);
 Collections.sort(list2);
@@ -72,7 +72,7 @@ public class TestCollectionUtil extends LuceneTestCase {
 }
 public void testInsertionSort() {
-for (int i = 0, c = 500 * RANDOM_MULTIPLIER; i < c; i++) {
+for (int i = 0, c = atLeast(500); i < c; i++) {
 List<Integer> list1 = createRandomList(30), list2 = new ArrayList<Integer>(list1);
 CollectionUtil.insertionSort(list1);
 Collections.sort(list2);

@@ -172,8 +172,8 @@ public class TestOpenBitSet extends LuceneTestCase {
 // large enough to flush obvious bugs, small enough to run in <.5 sec as part of a
 // larger testsuite.
 public void testSmall() {
-doRandomSets(1200 * RANDOM_MULTIPLIER, 1000 * RANDOM_MULTIPLIER, 1);
-doRandomSets(1200 * RANDOM_MULTIPLIER, 1000 * RANDOM_MULTIPLIER, 2);
+doRandomSets(atLeast(1200), atLeast(1000), 1);
+doRandomSets(atLeast(1200), atLeast(1000), 2);
 }
 public void testBig() {

@@ -33,7 +33,7 @@ public class TestPriorityQueue extends LuceneTestCase {
 }
 public void testPQ() throws Exception {
-testPQ(10000 * RANDOM_MULTIPLIER, random);
+testPQ(atLeast(10000), random);
 }
 public static void testPQ(int count, Random gen) {

@@ -51,7 +51,8 @@ public class TestRecyclingByteBlockAllocator extends LuceneTestCase {
 assertNotNull(block);
 final int size = block.length;
-for (int i = 0; i < 97 * RANDOM_MULTIPLIER; i++) {
+int num = atLeast(97);
+for (int i = 0; i < num; i++) {
 block = allocator.getByteBlock();
 assertNotNull(block);
 assertEquals(size, block.length);
@@ -71,7 +72,8 @@ public class TestRecyclingByteBlockAllocator extends LuceneTestCase {
 assertNotNull(block);
 final int size = block.length;
-for (int i = 0; i < 97 * RANDOM_MULTIPLIER; i++) {
+int numIters = atLeast(97);
+for (int i = 0; i < numIters; i++) {
 int num = 1 + random.nextInt(39);
 for (int j = 0; j < num; j++) {
 block = allocator.getByteBlock();
@@ -107,7 +109,8 @@ public class TestRecyclingByteBlockAllocator extends LuceneTestCase {
 assertNotNull(block);
 final int size = block.length;
-for (int i = 0; i < 97 * RANDOM_MULTIPLIER; i++) {
+int numIters = atLeast(97);
+for (int i = 0; i < numIters; i++) {
 int num = 1 + random.nextInt(39);
 for (int j = 0; j < num; j++) {
 block = allocator.getByteBlock();

@@ -112,7 +112,7 @@ public class TestSmallFloat extends LuceneTestCase {
 // up iterations for more exhaustive test after changing something
-int num = 100000 * RANDOM_MULTIPLIER;
+int num = atLeast(100000);
 for (int i = 0; i < num; i++) {
 float f = Float.intBitsToFloat(random.nextInt());
 if (Float.isNaN(f)) continue; // skip NaN

@@ -88,7 +88,7 @@ public class TestUnicodeUtil extends LuceneTestCase {
 public void testCodePointCount() {
 BytesRef utf8 = new BytesRef(20);
-int num = 50000 * RANDOM_MULTIPLIER;
+int num = atLeast(50000);
 for (int i = 0; i < num; i++) {
 final String s = _TestUtil.randomUnicodeString(random);
 UnicodeUtil.UTF16toUTF8(s, 0, s.length(), utf8);
@@ -101,7 +101,7 @@ public class TestUnicodeUtil extends LuceneTestCase {
 BytesRef utf8 = new BytesRef(20);
 IntsRef utf32 = new IntsRef(20);
 int[] codePoints = new int[20];
-int num = 50000 * RANDOM_MULTIPLIER;
+int num = atLeast(50000);
 for (int i = 0; i < num; i++) {
 final String s = _TestUtil.randomUnicodeString(random);
 UnicodeUtil.UTF16toUTF8(s, 0, s.length(), utf8);
@@ -168,7 +168,8 @@ public class TestUnicodeUtil extends LuceneTestCase {
 }
 public void testUTF8UTF16CharsRef() {
-for (int i = 0; i < 3989 * RANDOM_MULTIPLIER; i++) {
+int num = atLeast(3989);
+for (int i = 0; i < num; i++) {
 String unicode = _TestUtil.randomRealisticUnicodeString(random);
 BytesRef ref = new BytesRef(unicode);
 char[] arr = new char[1 + random.nextInt(100)];

@@ -90,8 +90,8 @@ public class TestBasicOperations extends LuceneTestCase {
 }
 public void testGetRandomAcceptedString() throws Throwable {
-final int ITER1 = 100 * RANDOM_MULTIPLIER;
-final int ITER2 = 100 * RANDOM_MULTIPLIER;
+final int ITER1 = atLeast(100);
+final int ITER2 = atLeast(100);
 for(int i=0;i<ITER1;i++) {
 final RegExp re = new RegExp(AutomatonTestUtil.randomRegexp(random), RegExp.NONE);

@@ -130,8 +130,8 @@ public class TestUTF32ToUTF8 extends LuceneTestCase {
 public void testRandomRanges() throws Exception {
 final Random r = random;
-int ITERS = 10 * RANDOM_MULTIPLIER;
-int ITERS_PER_DFA = 100 * RANDOM_MULTIPLIER;
+int ITERS = atLeast(10);
+int ITERS_PER_DFA = atLeast(100);
 for(int iter=0;iter<ITERS;iter++) {
 int x1 = getCodeStart(r);
 int x2 = getCodeStart(r);
@@ -202,7 +202,7 @@ public class TestUTF32ToUTF8 extends LuceneTestCase {
 }
 public void testRandomRegexes() throws Exception {
-int num = 250 * RANDOM_MULTIPLIER;
+int num = atLeast(250);
 for (int i = 0; i < num; i++) {
 assertAutomaton(new RegExp(AutomatonTestUtil.randomRegexp(random), RegExp.NONE).toAutomaton());
 }
@@ -213,7 +213,7 @@ public class TestUTF32ToUTF8 extends LuceneTestCase {
 ByteRunAutomaton bra = new ByteRunAutomaton(automaton);
 final AutomatonTestUtil.RandomAcceptedStrings ras = new AutomatonTestUtil.RandomAcceptedStrings(automaton);
-int num = 1000 * RANDOM_MULTIPLIER;
+int num = atLeast(1000);
 for (int i = 0; i < num; i++) {
 final String string;
 if (random.nextBoolean()) {

@@ -561,7 +561,8 @@ public class TestFSTs extends LuceneTestCase {
 System.out.println("TEST: verify random accepted terms");
 }
 final IntsRef scratch = new IntsRef(10);
-for(int iter=0;iter<500*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(500);
+for(int iter=0;iter<num;iter++) {
 T output = randomAcceptedWord(fst, scratch);
 assertTrue("accepted word " + inputToString(inputMode, scratch) + " is not valid", termsMap.containsKey(scratch));
 assertEquals(termsMap.get(scratch), output);
@@ -572,7 +573,8 @@ public class TestFSTs extends LuceneTestCase {
 System.out.println("TEST: verify seek");
 }
 IntsRefFSTEnum<T> fstEnum = new IntsRefFSTEnum<T>(fst);
-for(int iter=0;iter<100*RANDOM_MULTIPLIER;iter++) {
+num = atLeast(100);
+for(int iter=0;iter<num;iter++) {
 if (VERBOSE) {
 System.out.println("TEST: iter=" + iter);
 }
@@ -645,7 +647,8 @@ public class TestFSTs extends LuceneTestCase {
 }
 // test mixed next/seek
-for(int iter=0;iter<100*RANDOM_MULTIPLIER;iter++) {
+num = atLeast(100);
+for(int iter=0;iter<num;iter++) {
 if (VERBOSE) {
 System.out.println("TEST: iter " + iter);
 }
@@ -960,7 +963,7 @@ public class TestFSTs extends LuceneTestCase {
 @Nightly
 public void testBigSet() throws IOException {
-testRandomWords(50000, RANDOM_MULTIPLIER);
+testRandomWords(atLeast(50000), atLeast(1));
 }
 private static String inputToString(int inputMode, IntsRef term) {
@@ -1049,7 +1052,8 @@ public class TestFSTs extends LuceneTestCase {
 // Now confirm BytesRefFSTEnum and TermsEnum act the
 // same:
 final BytesRefFSTEnum<Long> fstEnum = new BytesRefFSTEnum<Long>(fst);
-for(int iter=0;iter<1000*RANDOM_MULTIPLIER;iter++) {
+int num = atLeast(1000);
+for(int iter=0;iter<num;iter++) {
 final BytesRef randomTerm = new BytesRef(getRandomString());
 if (VERBOSE) {

@@ -47,7 +47,7 @@ public class TestPackedInts extends LuceneTestCase {
 }
 public void testPackedInts() throws IOException {
-int num = 5 * RANDOM_MULTIPLIER;
+int num = atLeast(5);
 for (int iter = 0; iter < num; iter++) {
 long ceil = 2;
 for(int nbits=1;nbits<63;nbits++) {