LUCENE-9167: test speedup for slowest/pathological tests (round 3)

Author: Robert Muir
Date: 2020-01-24 08:58:59 -05:00
parent 4d61e4aaab
commit c53cc3edaf
136 changed files with 196 additions and 145 deletions
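
The bulk of this commit applies one idiom: lower the default iteration budget of randomized tests while keeping that budget scalable through the test framework. A minimal sketch of the idiom, assuming Lucene's test framework of this era (checkRandomData from BaseTokenStreamTestCase, RANDOM_MULTIPLIER from LuceneTestCase, raised via -Dtests.multiplier); the class and analyzer below are illustrative only, not part of the commit:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.standard.StandardAnalyzer;

// Hypothetical test class illustrating the pattern used throughout
// the diff below: a small default count that scales back up on demand.
public class TestIterationBudgetSketch extends BaseTokenStreamTestCase {
  public void testRandomStrings() throws Exception {
    Analyzer analyzer = new StandardAnalyzer();
    // 200 iterations by default; running with -Dtests.multiplier=5
    // restores the old 1000-iteration workload.
    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
    analyzer.close();
  }
}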

@@ -112,7 +112,7 @@ public class TestArabicAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     ArabicAnalyzer a = new ArabicAnalyzer();
-    checkRandomData(random(), a, 200*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -47,7 +47,7 @@ public class TestBengaliAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new BengaliAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -293,7 +293,7 @@ public class TestCJKAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Analyzer a = new CJKAnalyzer();
-    checkRandomData(random(), a, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random(), a, 10 * RANDOM_MULTIPLIER, 8192);
     a.close();
   }

@@ -170,12 +170,12 @@ public class TestCJKBigramFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomUnibiStrings() throws Exception {
-    checkRandomData(random(), unibiAnalyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), unibiAnalyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   /** blast some random strings through the analyzer */
   public void testRandomUnibiHugeStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, unibiAnalyzer, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, unibiAnalyzer, 10 * RANDOM_MULTIPLIER, 8192);
   }
 }

@@ -74,7 +74,7 @@ public class TestCJKWidthFilter extends BaseTokenStreamTestCase {
   }
 
   public void testRandomData() throws IOException {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -347,7 +347,7 @@ public class CommonGramsFilterTest extends BaseTokenStreamTestCase {
       }
     };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
     b.close();
   }
 }

@@ -389,7 +389,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, filter);
       }
     };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
     b.close();
   }

@@ -180,7 +180,7 @@ public class TestDecimalDigitFilter extends BaseTokenStreamTestCase {
    * blast some random strings through the filter
    */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), tokenized, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), tokenized, 200 * RANDOM_MULTIPLIER);
   }
 
   /** returns a psuedo-random codepoint which is a Decimal Digit */

@@ -130,7 +130,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new KeywordAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -63,7 +63,7 @@ public class TestCzechAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new CzechAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestDanishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new DanishAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -75,7 +75,7 @@ public class TestGermanLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -87,7 +87,7 @@ public class TestGermanMinimalStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -75,7 +75,7 @@ public class TestGermanNormalizationFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -82,7 +82,7 @@ public class TestGermanStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -64,7 +64,7 @@ public class TestEnglishMinimalStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -58,7 +58,7 @@ public class TestSpanishLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -55,6 +55,6 @@ public class TestPersianCharFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 }

@@ -75,7 +75,7 @@ public class TestFinnishLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -205,7 +205,7 @@ public class TestFrenchLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -106,7 +106,7 @@ public class TestFrenchMinimalStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -54,7 +54,7 @@ public class TestGalicianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new GalicianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -59,7 +59,7 @@ public class TestHindiAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new HindiAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestHungarianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new HungarianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -88,7 +88,7 @@ public class TestHunspellStemFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new HunspellStemFilter(tokenizer, dictionary));
       }
     };
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }

@@ -54,7 +54,7 @@ public class TestArmenianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new ArmenianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestIndonesianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new IndonesianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestItalianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new ItalianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }

@@ -58,7 +58,7 @@ public class TestItalianLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -48,6 +48,6 @@ public class TestLithuanianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), new LithuanianAnalyzer(), 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), new LithuanianAnalyzer(), 200 * RANDOM_MULTIPLIER);
   }
 }

@@ -54,7 +54,7 @@ public class TestLatvianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new LatvianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -142,7 +142,7 @@ public class TestRemoveDuplicatesTokenFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    final int numIters = atLeast(10);
+    final int numIters = atLeast(3);
     for (int i = 0; i < numIters; i++) {
       SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean());
       final int numEntries = atLeast(10);

@@ -130,6 +130,6 @@ public class TestScandinavianFoldingFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomData() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 }

@@ -128,6 +128,6 @@ public class TestScandinavianNormalizationFilter extends BaseTokenStreamTestCase
   /** blast some random strings through the analyzer */
   public void testRandomData() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 }

@@ -440,7 +440,7 @@ public class TestWordDelimiterFilter extends BaseTokenStreamTestCase {
   /** blast some enormous random strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
-    int numIterations = atLeast(3);
+    int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       final int flags = random().nextInt(512);
       final CharArraySet protectedWords;

@@ -501,7 +501,7 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase {
   /** blast some enormous random strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
-    int numIterations = atLeast(3);
+    int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
       final int flags = random().nextInt(512);
       final CharArraySet protectedWords;

@@ -194,7 +194,7 @@ public class EdgeNGramTokenFilterTest extends BaseTokenStreamTestCase {
             new EdgeNGramTokenFilter(tokenizer, min, max, preserveOriginal));
       }
     };
-    checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 10*RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -196,7 +196,7 @@ public class NGramTokenFilterTest extends BaseTokenStreamTestCase {
             new NGramTokenFilter(tokenizer, min, max, preserveOriginal));
       }
     };
-    checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 20);
+    checkRandomData(random(), a, 10*RANDOM_MULTIPLIER, 20);
     a.close();
   }
 }

@@ -113,7 +113,7 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, tokenizer);
       }
     };
-    checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 20);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER, 20);
     checkRandomData(random(), a, 10*RANDOM_MULTIPLIER, 1027);
     a.close();
   }

@@ -176,7 +176,7 @@ public class TestDutchAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new DutchAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }

@@ -54,7 +54,7 @@ public class TestNorwegianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new NorwegianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -93,7 +93,7 @@ public class TestNorwegianLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random, analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -93,7 +93,7 @@ public class TestNorwegianMinimalStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random, analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -226,7 +226,7 @@ public class TestPathHierarchyTokenizer extends BaseTokenStreamTestCase {
       }
     };
     // TODO: properly support positionLengthAttribute
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER, 20, false, false);
     a.close();
   }

@@ -184,7 +184,7 @@ public class TestReversePathHierarchyTokenizer extends BaseTokenStreamTestCase {
       }
     };
     // TODO: properly support positionLengthAttribute
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER, 20, false, false);
     a.close();
   }

@@ -99,7 +99,7 @@ public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, filter);
       }
     };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
     b.close();
   }

@@ -142,7 +142,7 @@ public class TestPatternTokenizer extends BaseTokenStreamTestCase
         return new TokenStreamComponents(tokenizer);
       }
     };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
     b.close();
   }

@@ -267,7 +267,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase {
        return new TokenStreamComponents(tokenizer);
      }
    };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
    b.close();
  }

@@ -212,7 +212,7 @@ public class TestSimplePatternTokenizer extends BaseTokenStreamTestCase {
        return new TokenStreamComponents(tokenizer);
      }
    };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
    b.close();
  }

@@ -54,7 +54,7 @@ public class TestPortugueseAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new PortugueseAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -119,7 +119,7 @@ public class TestPortugueseLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -93,7 +93,7 @@ public class TestPortugueseMinimalStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -93,7 +93,7 @@ public class TestPortugueseStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -54,7 +54,7 @@ public class TestRomanianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new RomanianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -59,7 +59,7 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new RussianAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -75,7 +75,7 @@ public class TestRussianLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -1125,7 +1125,7 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
       }
     };
-    checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, a, 3*RANDOM_MULTIPLIER, 8192);
     a.close();
   }

@@ -67,7 +67,7 @@ public class TestSerbianNormalizationFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -67,7 +67,7 @@ public class TestSerbianNormalizationRegularFilter extends BaseTokenStreamTestCa
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -334,14 +334,14 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new ClassicAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Analyzer analyzer = new ClassicAnalyzer();
-    checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random(), analyzer, 10 * RANDOM_MULTIPLIER, 8192);
     analyzer.close();
   }
 }

@@ -627,7 +627,7 @@ public class TestUAX29URLEmailTokenizer extends BaseTokenStreamTestCase {
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, a, 3 * RANDOM_MULTIPLIER, 8192);
   }
 
   public void testExampleURLs() throws Exception {

@@ -54,7 +54,7 @@ public class TestSwedishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new SwedishAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -75,7 +75,7 @@ public class TestSwedishLightStemFilter extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -1001,7 +1001,7 @@ public class TestSynonymGraphFilter extends BaseTokenStreamTestCase {
   /** simple random test like testRandom2, but for larger docs
    */
   public void testRandomHuge() throws Exception {
-    final int numIters = atLeast(3);
+    final int numIters = atLeast(1);
     for (int i = 0; i < numIters; i++) {
       SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean());
       final int numEntries = atLeast(10);
@@ -1406,7 +1406,7 @@ public class TestSynonymGraphFilter extends BaseTokenStreamTestCase {
       a = getAnalyzer(b, true);
     }
-    int iters = atLeast(10);
+    int iters = atLeast(1);
     for(int iter=0;iter<iters;iter++) {
       String doc = toTokenString(randomBinaryChars(50, 100, bias, 'a'));

@@ -532,7 +532,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
       }
     };
-    checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random, analyzer, 200*RANDOM_MULTIPLIER);
   }
 }
 */

@@ -98,14 +98,14 @@ public class TestThaiAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new ThaiAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Analyzer analyzer = new ThaiAnalyzer();
-    checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random(), analyzer, 3*RANDOM_MULTIPLIER, 8192);
     analyzer.close();
   }

@@ -56,7 +56,7 @@ public class TestTurkishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new TurkishAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -26,7 +26,7 @@ import org.apache.lucene.util.TestUtil;
 public class TestRollingCharBuffer extends LuceneTestCase {
 
   public void test() throws Exception {
-    final int ITERS = atLeast(1000);
+    final int ITERS = atLeast(100);
     RollingCharBuffer buffer = new RollingCharBuffer();

@@ -138,8 +138,8 @@ public class TestSegmentingTokenizerBase extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), sentence, 10000*RANDOM_MULTIPLIER);
-    checkRandomData(random(), sentenceAndWord, 10000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), sentence, 200*RANDOM_MULTIPLIER);
+    checkRandomData(random(), sentenceAndWord, 200*RANDOM_MULTIPLIER);
   }
 
   // some tokenizers for testing

@@ -192,7 +192,7 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
       }
     };
     // TODO: properly support positionLengthAttribute
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER, 20, false, false);
     a.close();
   }
@@ -208,7 +208,7 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
       }
     };
     // TODO: properly support positionLengthAttribute
-    checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192, false, false);
+    checkRandomData(random, a, 10 * RANDOM_MULTIPLIER, 8192, false, false);
     a.close();
   }
 }

@@ -86,7 +86,7 @@ public class TestCollationDocValuesField extends LuceneTestCase {
     doc.add(field);
     doc.add(collationField);
-    int numDocs = atLeast(500);
+    int numDocs = atLeast(100);
     for (int i = 0; i < numDocs; i++) {
       String value = TestUtil.randomSimpleString(random());
       field.setStringValue(value);
@@ -98,7 +98,7 @@ public class TestCollationDocValuesField extends LuceneTestCase {
     iw.close();
     IndexSearcher is = newSearcher(ir);
-    int numChecks = atLeast(100);
+    int numChecks = atLeast(20);
     try {
       for (int i = 0; i < numChecks; i++) {

@@ -339,7 +339,7 @@ public class TestICUTokenizer extends BaseTokenStreamTestCase {
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, a, 10 * RANDOM_MULTIPLIER, 8192);
   }
 
   public void testTokenAttributes() throws Exception {

@@ -115,6 +115,6 @@ public class TestICUTokenizerCJK extends BaseTokenStreamTestCase {
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Random random = random();
-    checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, a, 10 * RANDOM_MULTIPLIER, 8192);
   }
 }

@@ -80,6 +80,12 @@ public class TestExtendedMode extends BaseTokenStreamTestCase {
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
+    Random random = random();
+    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
+  }
+
+  @Nightly
+  public void testRandomHugeStringsAtNight() throws Exception {
     Random random = random();
     checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
   }
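
The TestExtendedMode hunk above shows a second idiom in this commit: split a heavy test into a cheap default variant plus a nightly-only variant that keeps the original workload. A sketch under the same assumptions (hypothetical class and analyzer; @Nightly is LuceneTestCase's annotation for tests that run only with -Dtests.nightly=true):

import java.util.Random;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.standard.StandardAnalyzer;

public class TestHugeStringsSketch extends BaseTokenStreamTestCase {
  private final Analyzer analyzer = new StandardAnalyzer();

  // Default run: one multiplier's worth of ~4 KB documents.
  public void testRandomHugeStrings() throws Exception {
    Random random = random();
    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
  }

  // Nightly run keeps the original, heavier configuration.
  @Nightly
  public void testRandomHugeStringsAtNight() throws Exception {
    Random random = random();
    checkRandomData(random, analyzer, 3 * RANDOM_MULTIPLIER, 8192);
  }
}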

@@ -94,7 +94,7 @@ public class TestJapaneseKatakanaStemFilter extends BaseTokenStreamTestCase {
   }
 
   public void testRandomData() throws IOException {
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -103,8 +103,8 @@ public class TestJapaneseReadingFormFilter extends BaseTokenStreamTestCase {
   public void testRandomData() throws IOException {
     Random random = random();
-    checkRandomData(random, katakanaAnalyzer, 200*RANDOM_MULTIPLIER);
-    checkRandomData(random, romajiAnalyzer, 200*RANDOM_MULTIPLIER);
+    checkRandomData(random, katakanaAnalyzer, 200 * RANDOM_MULTIPLIER);
+    checkRandomData(random, romajiAnalyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -369,7 +369,8 @@ public class
   public void testLargeDocReliability() throws Exception {
-    for (int i = 0; i < 10; i++) {
+    int numIters = atLeast(1);
+    for (int i = 0; i < numIters; i++) {
       String s = TestUtil.randomUnicodeString(random(), 10000);
       try (TokenStream ts = analyzer.tokenStream("foo", s)) {
         ts.reset();

@@ -73,7 +73,7 @@ public class TestUkrainianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new UkrainianMorfologikAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -58,7 +58,7 @@ public class TestKoreanReadingFormFilter extends BaseTokenStreamTestCase {
   public void testRandomData() throws IOException {
     Random random = random();
-    checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random, analyzer, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -400,7 +400,7 @@ public class TestKoreanTokenizer extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, graph);
       }
     };
-    checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
     analyzer.close();
   }

@@ -105,7 +105,7 @@ public class TestPhoneticFilter extends BaseTokenStreamTestCase {
       }
     };
-    checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), b, 200 * RANDOM_MULTIPLIER);
     b.close();
   }
 }

@@ -53,7 +53,7 @@ public class TestPolishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new PolishAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 }

@@ -55,11 +55,13 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TestUtil;
 
 /**
  * Test very simply that perf tasks - simple algorithms - are doing what they should.
  */
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestPerfTasksLogic extends BenchmarkTestCase {
 
   @Override
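
From here on, several classes are annotated with @LuceneTestCase.SuppressCodecs("SimpleText") instead of having their workloads cut: randomized codec selection can otherwise hand a large-index test the deliberately human-readable, very slow SimpleText codec. A sketch of the opt-out (the annotation is real; the class and test body are hypothetical placeholders):

import org.apache.lucene.util.LuceneTestCase;

// Excludes SimpleText from the codecs the test framework may randomly
// pick for this class; the test body is a placeholder.
@LuceneTestCase.SuppressCodecs("SimpleText")
public class TestBigIndexSketch extends LuceneTestCase {
  public void testIndexesManyDocs() throws Exception {
    // heavy indexing work that is pathological under SimpleText ...
  }
}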

@@ -42,6 +42,7 @@ import org.junit.Test;
 /**
  * Testcase for {@link org.apache.lucene.classification.utils.DatasetSplitter}
  */
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class DataSplitterTest extends LuceneTestCase {
 
   private LeafReader originalIndex;

@@ -439,7 +439,7 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new StandardAnalyzer();
-    checkRandomData(random(), analyzer, 200*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }

@@ -51,6 +51,7 @@ public class TestCharTermAttributeImpl extends LuceneTestCase {
     });
   }
 
+  @Slow
   public void testGrow() {
     CharTermAttributeImpl t = new CharTermAttributeImpl();
     StringBuilder buf = new StringBuilder("ab");

@@ -108,7 +108,7 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
     byte[] pointValue = new byte[3];
     byte[] uniquePointValue = new byte[3];
     random().nextBytes(uniquePointValue);
-    final int numDocs = atLeast(10000); // make sure we have several leaves
+    final int numDocs = TEST_NIGHTLY ? atLeast(10000) : atLeast(500); // at night, make sure we have several leaves
     final boolean multiValues = random().nextBoolean();
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
@@ -215,7 +215,7 @@ public class TestLucene60PointsFormat extends BasePointsFormatTestCase {
     uniquePointValue[1] = new byte[3];
     random().nextBytes(uniquePointValue[0]);
     random().nextBytes(uniquePointValue[1]);
-    final int numDocs = atLeast(10000); // make sure we have several leaves
+    final int numDocs = TEST_NIGHTLY ? atLeast(10000) : atLeast(1000); // in nightly, make sure we have several leaves
     final boolean multiValues = random().nextBoolean();
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
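
The points-format hunks above use a third idiom: branch on TEST_NIGHTLY inside the test body, so the scenario still runs by default, just against a smaller index. A sketch under the same assumption (TEST_NIGHTLY mirrors -Dtests.nightly; the class and loop body are placeholders):

import org.apache.lucene.util.LuceneTestCase;

public class TestNightlyGatingSketch extends LuceneTestCase {
  public void testManyDocs() throws Exception {
    // Nightly runs index enough documents to force several BKD leaves;
    // default runs settle for a few hundred.
    final int numDocs = TEST_NIGHTLY ? atLeast(10000) : atLeast(500);
    for (int i = 0; i < numDocs; ++i) {
      // index one small random document ...
    }
  }
}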

@@ -319,7 +319,8 @@ public class TestIndexedDISI extends LuceneTestCase {
   public void testFewMissingDocs() throws IOException {
     try (Directory dir = newDirectory()) {
-      for (int iter = 0; iter < 100; ++iter) {
+      int numIters = atLeast(10);
+      for (int iter = 0; iter < numIters; ++iter) {
         int maxDoc = TestUtil.nextInt(random(), 1, 100000);
         FixedBitSet set = new FixedBitSet(maxDoc);
         set.set(0, maxDoc);
@@ -412,7 +413,8 @@ public class TestIndexedDISI extends LuceneTestCase {
   public void testRandom() throws IOException {
     try (Directory dir = newDirectory()) {
-      for (int i = 0; i < 10; ++i) {
+      int numIters = atLeast(3);
+      for (int i = 0; i < numIters; ++i) {
         doTestRandom(dir);
       }
     }
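
TestIndexedDISI above swaps hard-coded loop bounds for atLeast(n), which returns at least n and is scaled up by the framework for nightly runs and higher multipliers. A sketch (hypothetical class; doOneIteration() stands in for the real per-iteration work):

import org.apache.lucene.util.LuceneTestCase;

public class TestScaledLoopSketch extends LuceneTestCase {
  public void testRandomIterations() throws Exception {
    // 3 iterations in a default run; more under -Dtests.multiplier
    // or -Dtests.nightly, so coverage shrinks only where it is cheap.
    int numIters = atLeast(3);
    for (int i = 0; i < numIters; ++i) {
      doOneIteration();
    }
  }

  private void doOneIteration() {
    // per-iteration randomized test body ...
  }
}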

@@ -67,6 +67,7 @@ import org.junit.Assume;
 //TODO: would be better in this test to pull termsenums and instanceof or something?
 // this way we can verify PFPF is doing the right thing.
 // for now we do termqueries.
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
 
   private IndexWriter newWriter(Directory dir, IndexWriterConfig conf)

@@ -70,7 +70,7 @@ public abstract class BaseShapeTestCase extends LuceneTestCase {
   // A particularly tricky adversary for BKD tree:
   public void testSameShapeManyTimes() throws Exception {
-    int numShapes = TEST_NIGHTLY ? atLeast(50) : atLeast(10);
+    int numShapes = TEST_NIGHTLY ? atLeast(50) : atLeast(3);
     // Every doc has 2 points:
     Object theShape = nextShape();

@@ -338,7 +338,7 @@ public class TestLatLonPointDistanceFeatureQuery extends LuceneTestCase {
     LatLonDocValuesField docValue = new LatLonDocValuesField("foo", 0., 0.);
     doc.add(docValue);
-    int numDocs = atLeast(10000);
+    int numDocs = atLeast(1000);
     for (int i = 0; i < numDocs; ++i) {
       double lat = random().nextDouble() * 180 - 90;
       double lon = random().nextDouble() * 360 - 180;
@@ -350,7 +350,8 @@ public class TestLatLonPointDistanceFeatureQuery extends LuceneTestCase {
     IndexReader reader = DirectoryReader.open(w);
     IndexSearcher searcher = newSearcher(reader);
-    for (int iter = 0; iter < 10; ++iter) {
+    int numIters = atLeast(3);
+    for (int iter = 0; iter < numIters; ++iter) {
       double lat = random().nextDouble() * 180 - 90;
       double lon = random().nextDouble() * 360 - 180;
       double pivotDistance = random().nextDouble() * random().nextDouble() * Math.PI * GeoUtils.EARTH_MEAN_RADIUS_METERS;

@@ -25,8 +25,10 @@ import org.apache.lucene.geo.Tessellator;
 import org.apache.lucene.geo.XYPolygon;
 import org.apache.lucene.geo.XYRectangle;
 import org.apache.lucene.geo.XYRectangle2D;
+import org.apache.lucene.util.LuceneTestCase;
 
 /** random cartesian bounding box, line, and polygon query tests for random indexed arrays of cartesian {@link XYPolygon} types */
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestXYMultiPolygonShapeQueries extends BaseXYShapeTestCase {
   @Override
   protected ShapeType getShapeType() {

@@ -50,6 +50,7 @@ import org.junit.Assume;
 import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
 
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestDirectoryReader extends LuceneTestCase {
 
   public void testDocument() throws IOException {

@@ -168,6 +168,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
    * file. We check this by using MockDirectoryWrapper to
    * measure max temp disk space used.
    */
+  // TODO: can this write less docs/indexes?
+  @Nightly
   public void testCommitOnCloseDiskUsage() throws IOException {
     // MemoryCodec, since it uses FST, is not necessarily
     // "additive", ie if you add up N small FSTs, then merge

@@ -980,7 +980,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     dir.close();
   }
 
-  @Slow
+  // TODO: this test can hit pathological cases (IW settings?) where it runs for far too long
+  @Nightly
   public void testIndexingThenDeleting() throws Exception {
     // TODO: move this test to its own class and just @SuppressCodecs?
     // TODO: is it enough to just use newFSDirectory?

@@ -362,6 +362,8 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase {
   /**
    * LUCENE-6299: Test if addindexes(Dir[]) prevents exceeding max docs.
   */
+  // TODO: can we use the setter to lower the amount of docs to be written here?
+  @Nightly
   public void testAddTooManyIndexesDir() throws Exception {
     // we cheat and add the same one over again... IW wants a write lock on each
     Directory dir = newDirectory(random(), NoLockFactory.INSTANCE);

@@ -128,6 +128,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   }
 
   // Test the case where both mergeFactor and maxBufferedDocs change
+  @Nightly
   public void testMaxBufferedDocsChange() throws IOException {
     Directory dir = newDirectory();

@@ -42,6 +42,7 @@ import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
 
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
 
   // LUCENE-5644: for first segment, two threads each indexed one doc (likely concurrently), but for second segment, each thread indexed the

@@ -50,7 +50,7 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
 /**
  * MultiThreaded IndexWriter tests
  */
-@Slow
+@Slow @LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestIndexWriterWithThreads extends LuceneTestCase {
 
   // Used by test cases below
@@ -141,7 +141,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
   public void testImmediateDiskFullWithThreads() throws Exception {
     int NUM_THREADS = 3;
-    final int numIterations = TEST_NIGHTLY ? 10 : 3;
+    final int numIterations = TEST_NIGHTLY ? 10 : 1;
     for (int iter=0;iter<numIterations;iter++) {
       if (VERBOSE) {
         System.out.println("\nTEST: iter=" + iter);

@@ -30,6 +30,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 
 /** Test that creates way, way, way too many fields */
+@LuceneTestCase.SuppressCodecs("SimpleText")
 public class TestManyFields extends LuceneTestCase {
   private static final FieldType storedTextType = new FieldType(TextField.TYPE_NOT_STORED);

Some files were not shown because too many files have changed in this diff.