LUCENE-9163: test speedup for slowest/pathological tests

Calming down individual test methods with double-digit execution times
after running tests many times.

There are a few more issues remaining, but this solves the majority of them.
This commit is contained in:
Robert Muir 2020-01-22 17:47:38 -05:00
parent 6b3e7feba1
commit 1051db4038
No known key found for this signature in database
GPG Key ID: 817AE1DD322D7ECA
91 changed files with 204 additions and 110 deletions

View File

@@ -112,7 +112,7 @@ public class TestArabicAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
ArabicAnalyzer a = new ArabicAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200*RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -81,7 +81,7 @@ public class TestBulgarianAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
BulgarianAnalyzer a = new BulgarianAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -168,7 +168,7 @@ public class TestBrazilianAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
BrazilianAnalyzer a = new BrazilianAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -61,7 +61,7 @@ public class TestCatalanAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
CatalanAnalyzer a = new CatalanAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -286,7 +286,7 @@ public class TestCJKAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new CJKAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -76,7 +76,7 @@ public class TestSoraniAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new SoraniAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -334,7 +334,7 @@ public class CommonGramsFilterTest extends BaseTokenStreamTestCase {
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -375,7 +375,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, new DictionaryCompoundWordTokenFilter(tokenizer, dict));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
InputSource is = new InputSource(getClass().getResource("da_UTF8.xml").toExternalForm());

View File

@@ -68,7 +68,7 @@ public class TestGermanAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
GermanAnalyzer a = new GermanAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -69,7 +69,7 @@ public class GreekAnalyzerTest extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new GreekAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -58,7 +58,7 @@ public class TestEnglishAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new EnglishAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -53,7 +53,7 @@ public class TestKStemmer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
/**

View File

@@ -74,7 +74,7 @@ public class TestPorterStemFilter extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {

View File

@@ -54,7 +54,7 @@ public class TestSpanishAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new SpanishAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -54,7 +54,7 @@ public class TestBasqueAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new BasqueAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -240,7 +240,7 @@ public class TestPersianAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
PersianAnalyzer a = new PersianAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -54,7 +54,7 @@ public class TestFinnishAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new FinnishAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -168,7 +168,7 @@ public class TestFrenchAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new FrenchAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -71,7 +71,7 @@ public class TestIrishAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new IrishAnalyzer();
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -80,7 +80,7 @@ public class TestGalicianMinimalStemFilter extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {

View File

@@ -1945,7 +1945,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
new ASCIIFoldingFilter(tokenizer, random().nextBoolean()));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -135,7 +135,7 @@ public class TestCapitalizationFilter extends BaseTokenStreamTestCase {
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -75,7 +75,7 @@ public class TestHyphenatedWordsFilter extends BaseTokenStreamTestCase {
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -64,7 +64,7 @@ public class TestKeepWordFilter extends BaseTokenStreamTestCase {
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -58,7 +58,7 @@ public class TestTrimFilter extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, new TrimFilter(tokenizer));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -895,7 +895,7 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase {
}
public void testRandomPaths() throws Exception {
int iters = atLeast(100);
int iters = atLeast(10);
for(int iter=0;iter<iters;iter++) {
String text = randomWDFText();
if (VERBOSE) {

View File

@@ -88,7 +88,7 @@ public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, filter);
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -132,7 +132,7 @@ public class TestPatternTokenizer extends BaseTokenStreamTestCase
return new TokenStreamComponents(tokenizer);
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -257,7 +257,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer);
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -202,7 +202,7 @@ public class TestSimplePatternTokenizer extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer);
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -99,7 +99,7 @@ public class TestReverseStringFilter extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -1111,7 +1111,7 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -132,7 +132,7 @@ public class TestSnowball extends BaseTokenStreamTestCase {
return new TokenStreamComponents(t, new SnowballFilter(t, snowballLanguage));
}
};
checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 20 * RANDOM_MULTIPLIER);
a.close();
}
}

View File

@@ -354,7 +354,7 @@ public class TestUAX29URLEmailAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
public void testMaxTokenLengthDefault() throws Exception {

View File

@@ -621,7 +621,7 @@ public class TestUAX29URLEmailTokenizer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
/** blast some random large strings through the analyzer */

View File

@@ -91,7 +91,7 @@ public class TestICUFoldingFilter extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {

View File

@@ -188,7 +188,7 @@ public class TestICUNormalizer2CharFilter extends BaseTokenStreamTestCase {
return new ICUNormalizer2CharFilter(reader, Normalizer2.getInstance(null, "nfkc_cf", Normalizer2.Mode.COMPOSE));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
// huge strings
checkRandomData(random(), a, 25*RANDOM_MULTIPLIER, 8192);
a.close();
@@ -205,7 +205,7 @@ public class TestICUNormalizer2CharFilter extends BaseTokenStreamTestCase {
return new ICUNormalizer2CharFilter(reader, Normalizer2.getInstance(null, "nfkc", Normalizer2.Mode.DECOMPOSE));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
// huge strings
checkRandomData(random(), a, 25*RANDOM_MULTIPLIER, 8192);
a.close();

View File

@@ -91,7 +91,7 @@ public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {

View File

@@ -103,7 +103,7 @@ public class TestICUTransformFilter extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, new ICUTransformFilter(tokenizer, transform));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
}

View File

@@ -333,7 +333,7 @@ public class TestICUTokenizer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
}
/** blast some random large strings through the analyzer */

View File

@@ -76,7 +76,7 @@ public class TestJapaneseBaseFormFilter extends BaseTokenStreamTestCase {
}
public void testRandomStrings() throws IOException {
checkRandomData(random(), analyzer, atLeast(1000));
checkRandomData(random(), analyzer, atLeast(200));
}
public void testEmptyTerm() throws IOException {

View File

@@ -256,7 +256,12 @@ public class TestJapaneseNumberFilter extends BaseTokenStreamTestCase {
@Test
public void testRandomHugeStrings() throws Exception {
checkRandomData(random(), analyzer, 5 * RANDOM_MULTIPLIER, 8192);
checkRandomData(random(), analyzer, RANDOM_MULTIPLIER, 4096);
}
@Test @Nightly
public void testRandomHugeStringsAtNight() throws Exception {
checkRandomData(random(), analyzer, 3 * RANDOM_MULTIPLIER, 8192);
}
@Test

View File

@@ -103,8 +103,8 @@ public class TestJapaneseReadingFormFilter extends BaseTokenStreamTestCase {
public void testRandomData() throws IOException {
Random random = random();
checkRandomData(random, katakanaAnalyzer, 1000*RANDOM_MULTIPLIER);
checkRandomData(random, romajiAnalyzer, 1000*RANDOM_MULTIPLIER);
checkRandomData(random, katakanaAnalyzer, 200*RANDOM_MULTIPLIER);
checkRandomData(random, romajiAnalyzer, 200*RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {

View File

@@ -320,7 +320,16 @@ public class
}
/** blast some random large strings through the analyzer */
@Slow
public void testRandomHugeStrings() throws Exception {
Random random = random();
checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
checkRandomData(random, analyzerNoPunct, RANDOM_MULTIPLIER, 4096);
checkRandomData(random, analyzerNormalNBest, RANDOM_MULTIPLIER, 4096);
}
@Nightly
public void testRandomHugeStringsAtNight() throws Exception {
Random random = random();
checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
checkRandomData(random, analyzerNoPunct, 3*RANDOM_MULTIPLIER, 8192);
@@ -328,6 +337,22 @@ public class
}
public void testRandomHugeStringsMockGraphAfter() throws Exception {
// Randomly inject graph tokens after JapaneseTokenizer:
Random random = random();
Analyzer analyzer = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new JapaneseTokenizer(newAttributeFactory(), readDict(), false, Mode.SEARCH);
TokenStream graph = new MockGraphTokenFilter(random(), tokenizer);
return new TokenStreamComponents(tokenizer, graph);
}
};
checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
analyzer.close();
}
@Nightly
public void testRandomHugeStringsMockGraphAfterAtNight() throws Exception {
// Randomly inject graph tokens after JapaneseTokenizer:
Random random = random();
Analyzer analyzer = new Analyzer() {
@@ -342,6 +367,7 @@ public class
analyzer.close();
}
public void testLargeDocReliability() throws Exception {
for (int i = 0; i < 10; i++) {
String s = TestUtil.randomUnicodeString(random(), 10000);

View File

@@ -80,7 +80,7 @@ public class TestKoreanAnalyzer extends BaseTokenStreamTestCase {
public void testRandom() throws IOException {
Random random = random();
final Analyzer a = new KoreanAnalyzer();
checkRandomData(random, a, atLeast(1000));
checkRandomData(random, a, atLeast(200));
a.close();
}
@@ -90,7 +90,15 @@ public class TestKoreanAnalyzer extends BaseTokenStreamTestCase {
public void testRandomHugeStrings() throws Exception {
Random random = random();
final Analyzer a = new KoreanAnalyzer();
checkRandomData(random, a, 2 * RANDOM_MULTIPLIER, 8192);
checkRandomData(random, a, RANDOM_MULTIPLIER, 4096);
a.close();
}
@Nightly
public void testRandomHugeStringsAtNight() throws Exception {
Random random = random();
final Analyzer a = new KoreanAnalyzer();
checkRandomData(random, a, 3 * RANDOM_MULTIPLIER, 8192);
a.close();
}

View File

@@ -274,6 +274,11 @@ public class TestKoreanNumberFilter extends BaseTokenStreamTestCase {
@Test
public void testRandomHugeStrings() throws Exception {
checkRandomData(random(), analyzer, RANDOM_MULTIPLIER, 4096);
}
@Test @Nightly
public void testRandomHugeStringsAtNight() throws Exception {
checkRandomData(random(), analyzer, 5 * RANDOM_MULTIPLIER, 8192);
}

View File

@@ -375,6 +375,14 @@ public class TestKoreanTokenizer extends BaseTokenStreamTestCase {
/** blast some random large strings through the tokenizer */
public void testRandomHugeStrings() throws Exception {
Random random = random();
checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
checkRandomData(random, analyzerUnigram, RANDOM_MULTIPLIER, 4096);
checkRandomData(random, analyzerDecompound, RANDOM_MULTIPLIER, 4096);
}
@Nightly
public void testRandomHugeStringsAtNight() throws Exception {
Random random = random();
checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
checkRandomData(random, analyzerUnigram, 3*RANDOM_MULTIPLIER, 8192);

View File

@@ -86,6 +86,7 @@ public class TokenInfoDictionaryTest extends LuceneTestCase {
}
/** enumerates the entire FST/lookup data and just does basic sanity checks */
@Slow
public void testEnumerateAll() throws Exception {
// just for debugging
int numTerms = 0;

View File

@@ -94,7 +94,7 @@ public class TestPhoneticFilter extends BaseTokenStreamTestCase {
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
a.close();
Analyzer b = new Analyzer() {

View File

@@ -255,14 +255,14 @@ public class TestSmartChineseAnalyzer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer analyzer = new SmartChineseAnalyzer();
checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
analyzer.close();
}
/** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception {
Analyzer analyzer = new SmartChineseAnalyzer();
checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
checkRandomData(random(), analyzer, 3 * RANDOM_MULTIPLIER, 8192);
analyzer.close();
}
}

View File

@@ -91,10 +91,11 @@ public class BM25NBClassifierTest extends ClassificationTestBase<BytesRef> {
}
}
@Test
@Test @Slow
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
BM25NBClassifier classifier = new BM25NBClassifier(leafReader,
analyzer, null, categoryFieldName, textFieldName);

View File

@@ -84,7 +84,8 @@ public class BooleanPerceptronClassifierTest extends ClassificationTestBase<Bool
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
BooleanPerceptronClassifier classifier = new BooleanPerceptronClassifier(leafReader, analyzer, null, 1, null, booleanFieldName, textFieldName);

View File

@@ -93,7 +93,8 @@ public class CachingNaiveBayesClassifierTest extends ClassificationTestBase<Byte
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
CachingNaiveBayesClassifier simpleNaiveBayesClassifier = new CachingNaiveBayesClassifier(leafReader,
analyzer, null, categoryFieldName, textFieldName);

View File

@@ -67,7 +67,8 @@ public class KNearestFuzzyClassifierTest extends ClassificationTestBase<BytesRef
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
Classifier<BytesRef> classifier = new KNearestFuzzyClassifier(leafReader, null, analyzer, null, 3, categoryFieldName, textFieldName);

View File

@@ -123,7 +123,8 @@ public class KNearestNeighborClassifierTest extends ClassificationTestBase<Bytes
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
KNearestNeighborClassifier kNearestNeighborClassifier = new KNearestNeighborClassifier(leafReader, null,
analyzer, null, 1, 1, 1, categoryFieldName, textFieldName);

View File

@@ -96,7 +96,8 @@ public class SimpleNaiveBayesClassifierTest extends ClassificationTestBase<Bytes
@Test
public void testPerformance() throws Exception {
MockAnalyzer analyzer = new MockAnalyzer(random());
LeafReader leafReader = getRandomIndex(analyzer, 100);
int numDocs = atLeast(10);
LeafReader leafReader = getRandomIndex(analyzer, numDocs);
try {
SimpleNaiveBayesClassifier simpleNaiveBayesClassifier = new SimpleNaiveBayesClassifier(leafReader,
analyzer, null, categoryFieldName, textFieldName);

View File

@@ -66,7 +66,8 @@ public class DataSplitterTest extends LuceneTestCase {
Document doc;
Random rnd = random();
for (int i = 0; i < 1000; i++) {
int numDocs = atLeast(100);
for (int i = 0; i < numDocs; i++) {
doc = new Document();
doc.add(new Field(idFieldName, "id" + Integer.toString(i), ft));
doc.add(new Field(textFieldName, TestUtil.randomUnicodeString(rnd, 1024), ft));

View File

@@ -122,6 +122,8 @@ public class TestIndexedDISI extends LuceneTestCase {
}
}
// TODO: can this be toned down?
@Nightly
public void testRandomBlocks() throws IOException {
final int BLOCKS = 5;
FixedBitSet set = createSetWithRandomBlocks(BLOCKS);

View File

@@ -87,7 +87,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedSetVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedSetVsStoredFields(atLeast(300), 1, 32766, 16, 100);
doTestSortedSetVsStoredFields(atLeast(100), 1, 32766, 16, 100);
}
}
@@ -103,7 +103,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
public void testSortedVariableLengthBigVsStoredFields() throws Exception {
int numIterations = atLeast(1);
for (int i = 0; i < numIterations; i++) {
doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
doTestSortedVsStoredFields(atLeast(100), 1d, 1, 32766);
}
}
@@ -439,7 +439,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
}
}
@Slow
@Nightly
public void testSortedSetAroundBlockSize() throws IOException {
final int frontier = 1 << Lucene80DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
@@ -492,7 +492,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
}
}
@Slow
@Nightly
public void testSortedNumericAroundBlockSize() throws IOException {
final int frontier = 1 << Lucene80DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {
@@ -544,7 +544,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
doTestSortedNumericBlocksOfVariousBitsPerValue(() -> TestUtil.nextInt(random(), 0, 2));
}
@Slow
@Nightly
public void testNumericBlocksOfVariousBitsPerValue() throws Exception {
doTestSparseNumericBlocksOfVariousBitsPerValue(1);
}

View File

@@ -70,7 +70,7 @@ public abstract class BaseShapeTestCase extends LuceneTestCase {
// A particularly tricky adversary for BKD tree:
public void testSameShapeManyTimes() throws Exception {
int numShapes = atLeast(50);
int numShapes = TEST_NIGHTLY ? atLeast(50) : atLeast(10);
// Every doc has 2 points:
Object theShape = nextShape();

View File

@@ -223,7 +223,7 @@ public class TestLatLonShape extends LuceneTestCase {
/** test we can search for a point with a large number of vertices*/
public void testLargeVertexPolygon() throws Exception {
int numVertices = TestUtil.nextInt(random(), 200000, 500000);
int numVertices = TEST_NIGHTLY ? TestUtil.nextInt(random(), 200000, 500000) : TestUtil.nextInt(random(), 20000, 50000);
IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setMergeScheduler(new SerialMergeScheduler());
int mbd = iwc.getMaxBufferedDocs();

View File

@@ -37,6 +37,17 @@ public class TestTessellator extends LuceneTestCase {
}
public void testSimpleTessellation() throws Exception {
Polygon poly = GeoTestUtil.createRegularPolygon(0.0, 0.0, 100000, 100000);
Polygon inner = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
new double[]{1.0, -1.0, -0.5, -1.0, 1.0, 0.5, 1.0});
Polygon inner2 = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
new double[]{-2.0, -4.0, -3.5, -4.0, -2.0, -2.5, -2.0});
poly = new Polygon(poly.getPolyLats(), poly.getPolyLons(), inner, inner2);
assertTrue(Tessellator.tessellate(poly).size() > 0);
}
@Nightly
public void testSimpleTessellationAtNight() throws Exception {
Polygon poly = GeoTestUtil.createRegularPolygon(0.0, 0.0, 1000000, 1000000);
Polygon inner = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
new double[]{1.0, -1.0, -0.5, -1.0, 1.0, 0.5, 1.0});

View File

@@ -486,7 +486,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
doTestOperationsOnDiskFull(false);
}
@Slow
// TODO: can we tone this test down so it isn't crazy slow?
@Nightly
public void testUpdatesOnDiskFull() throws IOException {
doTestOperationsOnDiskFull(true);
}

View File

@@ -749,7 +749,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
};
final int NUM_THREAD = 3;
final int NUM_ITER = 100;
final int NUM_ITER = atLeast(10);
for(int i=0;i<2;i++) {
Directory dir = newDirectory();
@@ -1950,6 +1950,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
}
// TODO: can be super slow in pathological cases (merge config?)
@Nightly
public void testMergeExceptionIsTragic() throws Exception {
MockDirectoryWrapper dir = newMockDirectory();
final AtomicBoolean didFail = new AtomicBoolean();

View File

@@ -1522,7 +1522,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
IndexWriter writer = new IndexWriter(dir, conf);
// test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
final int numDocs = atLeast(20000);
final int numDocs = TEST_NIGHTLY ? atLeast(20000) : atLeast(200);
final int numNumericFields = atLeast(5);
final int numTerms = TestUtil.nextInt(random, 10, 100); // terms should affect many docs
Set<String> updateTerms = new HashSet<>();

View File

@@ -1031,7 +1031,7 @@ public class TestPhraseQuery extends LuceneTestCase {
public void testRandomTopDocs() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
int numDocs = atLeast(128 * 8 * 8 * 3); // make sure some terms have skip data
int numDocs = TEST_NIGHTLY ? atLeast(128 * 8 * 8 * 3) : atLeast(100); // at night, make sure some terms have skip data
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
int numTerms = random().nextInt(1 << random().nextInt(5));

View File

@@ -67,7 +67,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
public void testSimple() throws Exception {
final int numNodes = TestUtil.nextInt(random(), 1, 10);
final double runTimeSec = atLeast(3);
final double runTimeSec = TEST_NIGHTLY ? atLeast(5) : atLeast(1);
final int minDocsToMakeTerms = TestUtil.nextInt(random(), 5, 20);

View File

@@ -385,7 +385,7 @@ public class TestSynonymQuery extends LuceneTestCase {
public void testRandomTopDocs() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
int numDocs = atLeast(128 * 8 * 8 * 3); // make sure some terms have skip data
int numDocs = TEST_NIGHTLY ? atLeast(128 * 8 * 8 * 3) : atLeast(100); // at night, make sure some terms have skip data
for (int i = 0; i < numDocs; ++i) {
Document doc = new Document();
int numValues = random().nextInt(1 << random().nextInt(5));

View File

@@ -87,7 +87,7 @@ public class TestDocIdSetBuilder extends LuceneTestCase {
}
public void testRandom() throws IOException {
final int maxDoc = TestUtil.nextInt(random(), 1, 10000000);
final int maxDoc = TEST_NIGHTLY ? TestUtil.nextInt(random(), 1, 10000000) : TestUtil.nextInt(random(), 1, 100000) ;
for (int i = 1 ; i < maxDoc / 2; i <<=1) {
final int numDocs = TestUtil.nextInt(random(), 1, i);
final FixedBitSet docs = new FixedBitSet(maxDoc);

View File

@@ -937,7 +937,7 @@ public class TestPackedInts extends LuceneTestCase {
}
public void testPackedLongValues() {
final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 10000)];
float[] ratioOptions = new float[]{PackedInts.DEFAULT, PackedInts.COMPACT, PackedInts.FAST};
for (int bpv : new int[]{0, 1, 63, 64, RandomNumbers.randomIntBetween(random(), 2, 62)}) {
for (DataType dataType : Arrays.asList(DataType.DELTA_PACKED)) {

View File

@@ -83,7 +83,7 @@ public class TestExpressionSorts extends LuceneTestCase {
}
public void testQueries() throws Exception {
int n = atLeast(4);
int n = atLeast(1);
for (int i = 0; i < n; i++) {
assertQuery(new MatchAllDocsQuery());
assertQuery(new TermQuery(new Term("english", "one")));

View File

@@ -489,7 +489,7 @@ public class TestDrillSideways extends FacetTestCase {
int numDims = TestUtil.nextInt(random(), 2, 5);
//int numDims = 3;
int numDocs = atLeast(3000);
int numDocs = atLeast(300);
//int numDocs = 20;
if (VERBOSE) {
System.out.println(

View File

@ -226,7 +226,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
final AtomicBoolean stop = new AtomicBoolean();
// How many unique facets to index before stopping:
final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;
final int ordLimit = TEST_NIGHTLY ? 100000 : 600;
Thread indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
indexer.start();

View File

@ -706,7 +706,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
final int abOrd = trBase.getOrdinal(abPath);
final int abYoungChildBase1 = ca1.children()[abOrd];
final int numCategories = atLeast(800);
final int numCategories = atLeast(200);
for (int i = 0; i < numCategories; i++) {
twBase.addCategory(new FacetLabel("a", "b", Integer.toString(i)));
}
@ -720,7 +720,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
final ParallelTaxonomyArrays ca2 = trBase.getParallelTaxonomyArrays();
final int abYoungChildBase2 = ca2.children()[abOrd];
int numRetries = atLeast(50);
int numRetries = atLeast(10);
for (int retry = 0; retry < numRetries; retry++) {
assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, numCategories);
}

View File

@ -401,7 +401,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
FacetsConfig config = new FacetsConfig();
config.setMultiValued("dim", true);
int numLabels = TestUtil.nextInt(random(), 40000, 100000);
int numLabels = TEST_NIGHTLY ? TestUtil.nextInt(random(), 40000, 100000) : TestUtil.nextInt(random(), 4000, 10000);
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));

View File

@ -247,7 +247,7 @@ public class TestDirectoryTaxonomyWriter extends FacetTestCase {
}
public void testConcurrency() throws Exception {
final int ncats = atLeast(100000); // add many categories
final int ncats = TEST_NIGHTLY ? atLeast(100000) : atLeast(1000); // at night, add many categories
final int range = ncats * 3; // affects the categories selection
final AtomicInteger numCats = new AtomicInteger(ncats);
final Directory dir = newDirectory();

View File

@ -83,8 +83,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
private Set<String> queries = new HashSet<>();
public static final int ITERATIONS = 100 * RANDOM_MULTIPLIER;
@Override
public void setUp() throws Exception {
super.setUp();
@ -115,7 +113,8 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
*/
public void testRandomQueries() throws Exception {
MemoryIndex index = randomMemoryIndex();
for (int i = 0; i < ITERATIONS; i++) {
int iterations = TEST_NIGHTLY ? 100 * RANDOM_MULTIPLIER : 10 * RANDOM_MULTIPLIER;
for (int i = 0; i < iterations; i++) {
assertAgainstDirectory(index);
}
}

View File

@ -73,7 +73,7 @@ public class TestCachePurging extends MonitorTestBase {
}
public void testConcurrentPurges() throws Exception {
int iters = Integer.getInteger("purgeIters", 2);
int iters = Integer.getInteger("purgeIters", 1);
for (int i = 0; i < iters; i++) {
doConcurrentPurgesAndUpdatesTest();
}

View File

@ -157,8 +157,11 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
final List<Shape> queryShapes = new ArrayList<>();
while(querySpatialData.hasNext()) {
queryShapes.add(querySpatialData.next().shape);
queryShapes.add(randomQueryShape());
if (TEST_NIGHTLY) {
queryShapes.add(randomQueryShape());
}
}
queryShapes.add(randomQueryShape());
testOperation(SpatialOperation.Intersects, indexedShapes, queryShapes, random().nextBoolean());
}
}

View File

@ -115,4 +115,22 @@ public class Geo3dShapeWGS84ModelRectRelationTest extends ShapeRectRelationTestC
assertEquals(circle.relate(bPoint), SpatialRelation.CONTAINS);
}
// very slow, test sources are not all here, no clue how to fix it
@Nightly
public void testGeoCircleRect() {
super.testGeoCircleRect();
}
// very slow, test sources are not all here, no clue how to fix it
@Nightly
public void testGeoPolygonRect() {
super.testGeoPolygonRect();
}
// very slow, test sources are not all here, no clue how to fix it
@Nightly
public void testGeoPathRect() {
super.testGeoPathRect();
}
}

View File

@ -46,14 +46,14 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
super(ctx);
}
//20 times each -- should be plenty
//2 times each -- should be plenty
protected int getContainsMinimum(int laps) {
return 20;
return 2;
}
protected int getIntersectsMinimum(int laps) {
return 20;
return 2;
}
// producing "within" cases in Geo3D based on our random shapes doesn't happen often. It'd be nice to increase this.
@ -62,11 +62,11 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
}
protected int getDisjointMinimum(int laps) {
return 20;
return 2;
}
protected int getBoundingMinimum(int laps) {
return 20;
return 2;
}
}
@ -117,6 +117,7 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
}.testRelateWithRectangle();
}
// very slow, and test sources are not here, so no clue how to fix
@Test
public void testGeoPolygonRect() {
new AbstractRectIntersectionTestHelper(ctx) {

View File

@ -189,7 +189,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
/** Tests consistency of GeoArea.getRelationship vs GeoShape.isWithin */
public void testGeo3DRelations() throws Exception {
int numDocs = atLeast(1000);
int numDocs = atLeast(200);
if (VERBOSE) {
System.out.println("TEST: " + numDocs + " docs");
}
@ -208,8 +208,6 @@ public class TestGeo3DPoint extends LuceneTestCase {
int recurseDepth = RandomNumbers.randomIntBetween(random(), 5, 15);
iters = atLeast(50);
for(int iter=0;iter<iters;iter++) {
GeoShape shape = randomShape();
@ -472,7 +470,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
}
public void testRandomMedium() throws Exception {
doTestRandom(10000);
doTestRandom(1000);
}
@Nightly

View File

@ -39,7 +39,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
public class RandomGeo3dShapeGenerator extends LuceneTestCase {
/* Max num of iterations to find right shape under given constrains */
final private static int MAX_SHAPE_ITERATIONS = 50;
final private static int MAX_SHAPE_ITERATIONS = 20;
/* Max num of iterations to find right point under given constrains */
final private static int MAX_POINT_ITERATIONS = 1000;

View File

@ -20,7 +20,6 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.carrotsearch.randomizedtesting.annotations.Repeat;
import com.carrotsearch.randomizedtesting.generators.BiasedNumbers;
import org.junit.Test;
@ -30,7 +29,6 @@ import org.junit.Test;
public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {
@Test
@Repeat(iterations = 10)
public void testRandomLUCENE8157() {
final PlanetModel planetModel = randomPlanetModel();
final GeoPoint startPoint = randomGeoPoint(planetModel);
@ -92,7 +90,6 @@ public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {
* biased doubles.
*/
@Test
@Repeat(iterations = 10)
//@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-8281")
public void testCompareBigPolygons() {
testComparePolygons(Math.PI);
@ -103,7 +100,6 @@ public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {
* biased doubles.
*/
@Test
@Repeat(iterations = 10)
//@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-8281")
public void testCompareSmallPolygons() {
testComparePolygons(1e-4 * Math.PI);

View File

@ -17,7 +17,6 @@
package org.apache.lucene.spatial3d.geom;
import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.junit.Test;
/**
@ -32,7 +31,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
*
*/
@Test
@Repeat(iterations = 5)
public void testRandomPointWithin() {
int referenceShapeType = CONVEX_POLYGON;
PlanetModel planetModel = randomPlanetModel();
@ -65,7 +63,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
* the original shape.
*
*/
@Repeat(iterations = 5)
public void testRandomPointNotWithin() {
int referenceShapeType = CONVEX_POLYGON;
PlanetModel planetModel = randomPlanetModel();
@ -97,7 +94,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
* Note that both shapes cannot be concave.
*/
@Test
@Repeat(iterations = 5)
public void testRandomDisjoint() {
int referenceShapeType = CONVEX_SIMPLE_POLYGON;
PlanetModel planetModel = randomPlanetModel();
@ -136,7 +132,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
* Note that if the geoAreaShape is not concave the other shape must be not concave.
*/
@Test
@Repeat(iterations = 5)
public void testRandomWithIn() {
PlanetModel planetModel = randomPlanetModel();
int geoAreaShapeType = randomGeoAreaShapeType();
@ -187,7 +182,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
*
*/
@Test
@Repeat(iterations = 1)
public void testRandomContains() {
int referenceShapeType = CONVEX_SIMPLE_POLYGON;
PlanetModel planetModel = randomPlanetModel();
@ -236,7 +230,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
* the geoAreaShape.
*/
@Test
@Repeat(iterations = 5)
public void testRandomOverlaps() {
PlanetModel planetModel = randomPlanetModel();
int geoAreaShapeType = randomGeoAreaShapeType();

View File

@ -549,7 +549,7 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
Directory dir = null;
RandomIndexWriter iw = null;
final String postingsFormat = TestUtil.getPostingsFormat("dummy");
boolean codecOk = iterations * maxWordLength < 100000 || !(postingsFormat.equals("SimpleText"));
boolean codecOk = iterations * maxWordLength < 100000 && !(postingsFormat.equals("SimpleText"));
if (rarely(random) && codecOk) {
dir = newFSDirectory(createTempDir("bttc"));
iw = new RandomIndexWriter(new Random(seed), dir, a);

View File

@ -388,7 +388,8 @@ public abstract class BaseMergePolicyTestCase extends LuceneTestCase {
* Simulate an update use-case where documents are uniformly updated across segments.
*/
public void testSimulateUpdates() throws IOException {
doTestSimulateUpdates(mergePolicy(), 10_000_000, 2500);
int numDocs = atLeast(1_000_000);
doTestSimulateUpdates(mergePolicy(), numDocs, 2500);
}
/**

View File

@ -508,7 +508,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
}
public void testRandomBinaryMedium() throws Exception {
doTestRandomBinary(10000);
doTestRandomBinary(1000);
}
@Nightly

View File

@ -586,6 +586,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
}
}
@Slow
public void testLotsOfFields() throws IOException {
final RandomDocumentFactory docFactory = new RandomDocumentFactory(5000, 10);
for (Options options : validOptions()) {

View File

@ -251,7 +251,7 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
*/
public void testRandomScoring() throws Exception {
Random random = random();
final int iterations = atLeast(3);
final int iterations = atLeast(1);
for (int i = 0; i < iterations; i++) {
// pull a new similarity to switch up parameters
Similarity similarity = getSimilarity(random);

View File

@ -79,7 +79,15 @@ public abstract class BaseDocIdSetTestCase<T extends DocIdSet> extends LuceneTes
copy = copyOf(set, numBits); // then random index
assertEquals(numBits, set, copy);
// test regular increments
int maxIterations = TEST_NIGHTLY ? Integer.MAX_VALUE : 10;
int iterations = 0;
for (int inc = 2; inc < 1000; inc += TestUtil.nextInt(random(), 1, 100)) {
// don't let this test run too many times, even if it gets unlucky with "inc"
if (iterations >= maxIterations) {
break;
}
iterations++;
set = new BitSet(numBits);
for (int d = random().nextInt(10); d < numBits; d += inc) {
set.set(d);