LUCENE-4174: tone down the now really slow analysis tests

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1358149 13f79535-47bb-0310-9956-ffa450edef68
Author: Robert Muir
Date:   2012-07-06 11:56:47 +00:00
Parent: 4aa31b1e19
Commit: c746415d08

92 changed files with 140 additions and 142 deletions
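The change is mechanical across all 92 files: every checkRandomData call in the random-blast tests has its iteration count cut roughly tenfold (10000 to 1000 plain rounds, 200 to 100 huge-string rounds), a few tests shrink their atLeast() bounds or drop the @Slow annotation, and the n-gram tests also reduce their maximum gram size from 15 to 4. A representative before/after sketch of the pattern, with SomeAnalyzer standing in as a placeholder for whichever analyzer a given test exercises:

  /** blast some random strings through the analyzer */
  public void testRandomStrings() throws Exception {
    // before: 10000*RANDOM_MULTIPLIER rounds made the analysis suite crawl
    // after: 1000*RANDOM_MULTIPLIER keeps the same random coverage at a tenth of the cost
    checkRandomData(random(), new SomeAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
  }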

@@ -96,6 +96,6 @@ public class TestArabicAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new ArabicAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new ArabicAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -76,6 +76,6 @@ public class TestBulgarianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new BulgarianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new BulgarianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -162,7 +162,7 @@ public class TestBrazilianStemmer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new BrazilianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new BrazilianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -58,6 +58,6 @@ public class TestCatalanAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new CatalanAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new CatalanAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -508,12 +508,12 @@ public class HTMLStripCharFilterTest extends BaseTokenStreamTestCase {
 }
 public void testRandom() throws Exception {
-int numRounds = RANDOM_MULTIPLIER * 10000;
+int numRounds = RANDOM_MULTIPLIER * 1000;
 checkRandomData(random(), newTestAnalyzer(), numRounds);
 }
 public void testRandomHugeStrings() throws Exception {
-int numRounds = RANDOM_MULTIPLIER * 200;
+int numRounds = RANDOM_MULTIPLIER * 100;
 checkRandomData(random(), newTestAnalyzer(), numRounds, 8192);
 }

@@ -229,7 +229,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 //@Ignore("wrong finalOffset: https://issues.apache.org/jira/browse/LUCENE-3971")
 public void testRandomMaps() throws Exception {
-int numIterations = atLeast(10);
+int numIterations = atLeast(3);
 for (int i = 0; i < numIterations; i++) {
 final NormalizeCharMap map = randomMap();
 Analyzer analyzer = new Analyzer() {
@@ -270,7 +270,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 public void testRandomMaps2() throws Exception {
 final Random random = random();
-final int numIterations = atLeast(10);
+final int numIterations = atLeast(3);
 for(int iter=0;iter<numIterations;iter++) {
 if (VERBOSE) {

@@ -272,13 +272,13 @@ public class TestCJKAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new CJKAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new CJKAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new CJKAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new CJKAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 public void testEmptyTerm() throws IOException {

@@ -63,7 +63,7 @@ public class TestCJKWidthFilter extends BaseTokenStreamTestCase {
 }
 public void testRandomData() throws IOException {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -318,7 +318,7 @@ public class CommonGramsFilterTest extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 Analyzer b = new Analyzer() {
@@ -330,6 +330,6 @@ public class CommonGramsFilterTest extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -348,7 +348,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new DictionaryCompoundWordTokenFilter(TEST_VERSION_CURRENT, tokenizer, dict));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 InputSource is = new InputSource(getClass().getResource("da_UTF8.xml").toExternalForm());
 final HyphenationTree hyphenator = HyphenationCompoundWordTokenFilter.getHyphenationTree(is);
@@ -361,7 +361,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, filter);
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws Exception {

@@ -191,17 +191,17 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new WhitespaceAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
-checkRandomData(random(), new SimpleAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
-checkRandomData(random(), new StopAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new WhitespaceAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new SimpleAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new StopAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
-checkRandomData(random, new SimpleAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
-checkRandomData(random, new StopAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new SimpleAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new StopAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -315,12 +315,12 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new ClassicAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new ClassicAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new ClassicAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new ClassicAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -127,6 +127,6 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new KeywordAnalyzer(), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new KeywordAnalyzer(), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -233,13 +233,13 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new StandardAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new StandardAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new StandardAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new StandardAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 // Adds random graph after:
@@ -254,6 +254,6 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenStream);
 }
 },
-200*RANDOM_MULTIPLIER, 8192);
+100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -252,6 +252,6 @@ public class TestUAX29URLEmailAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new UAX29URLEmailAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new UAX29URLEmailAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -455,12 +455,12 @@ public class TestUAX29URLEmailTokenizer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -52,6 +52,6 @@ public class TestCzechAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new CzechAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new CzechAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestDanishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new DanishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new DanishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -61,6 +61,6 @@ public class TestGermanAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new GermanAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new GermanAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -48,7 +48,7 @@ public class TestGermanLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -60,7 +60,7 @@ public class TestGermanMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -64,7 +64,7 @@ public class TestGermanNormalizationFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -60,7 +60,7 @@ public class TestGermanStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -66,6 +66,6 @@ public class GreekAnalyzerTest extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new GreekAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new GreekAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -55,6 +55,6 @@ public class TestEnglishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new EnglishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new EnglishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -54,7 +54,7 @@ public class TestEnglishMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -42,7 +42,7 @@ public class TestKStemmer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /**

@@ -63,7 +63,7 @@ public class TestPorterStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestSpanishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new SpanishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new SpanishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -48,7 +48,7 @@ public class TestSpanishLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestBasqueAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new BasqueAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new BasqueAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -224,6 +224,6 @@ public class TestPersianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new PersianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new PersianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestFinnishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new FinnishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new FinnishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -48,7 +48,7 @@ public class TestFinnishLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -164,7 +164,7 @@ public class TestFrenchAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new FrenchAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new FrenchAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** test accent-insensitive */

@@ -178,7 +178,7 @@ public class TestFrenchLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -62,7 +62,7 @@ public class TestFrenchMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -66,6 +66,6 @@ public class TestIrishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new IrishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new IrishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestGalicianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new GalicianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new GalicianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -52,7 +52,7 @@ public class TestGalicianMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -47,6 +47,6 @@ public class TestHindiAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new HindiAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new HindiAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestHungarianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new HungarianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new HungarianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -72,7 +72,7 @@ public class HunspellStemFilterTest extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new HunspellStemFilter(tokenizer, DICTIONARY));
 }
 };
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestArmenianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new ArmenianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new ArmenianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestIndonesianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new IndonesianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new IndonesianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -54,7 +54,7 @@ public class TestItalianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new ItalianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new ItalianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** test that the elisionfilter is working */

@@ -48,7 +48,7 @@ public class TestItalianLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestLatvianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new LatvianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new LatvianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -1923,7 +1923,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new ASCIIFoldingFilter(tokenizer));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -132,7 +132,7 @@ public class TestCapitalizationFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -74,7 +74,7 @@ public class TestHyphenatedWordsFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -77,6 +77,6 @@ public class TestKeepWordFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -123,7 +123,7 @@ public class TestTrimFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new TrimFilter(tokenizer, false));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 Analyzer b = new Analyzer() {
@@ -133,7 +133,7 @@ public class TestTrimFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new TrimFilter(tokenizer, true));
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -147,20 +147,20 @@ public class EdgeNGramTokenFilterTest extends BaseTokenStreamTestCase {
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
 return new TokenStreamComponents(tokenizer,
-new EdgeNGramTokenFilter(tokenizer, EdgeNGramTokenFilter.Side.FRONT, 2, 15));
+new EdgeNGramTokenFilter(tokenizer, EdgeNGramTokenFilter.Side.FRONT, 2, 4));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 Analyzer b = new Analyzer() {
 @Override
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
 return new TokenStreamComponents(tokenizer,
-new EdgeNGramTokenFilter(tokenizer, EdgeNGramTokenFilter.Side.BACK, 2, 15));
+new EdgeNGramTokenFilter(tokenizer, EdgeNGramTokenFilter.Side.BACK, 2, 4));
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER, 20, false, false);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER, 20, false, false);
 }
 public void testEmptyTerm() throws Exception {

@@ -105,21 +105,21 @@ public class EdgeNGramTokenizerTest extends BaseTokenStreamTestCase {
 Analyzer a = new Analyzer() {
 @Override
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-Tokenizer tokenizer = new EdgeNGramTokenizer(reader, EdgeNGramTokenizer.Side.FRONT, 2, 15);
+Tokenizer tokenizer = new EdgeNGramTokenizer(reader, EdgeNGramTokenizer.Side.FRONT, 2, 4);
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER, 20, false, false);
-checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 8192, false, false);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+checkRandomData(random(), a, 100*RANDOM_MULTIPLIER, 8192, false, false);
 Analyzer b = new Analyzer() {
 @Override
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-Tokenizer tokenizer = new EdgeNGramTokenizer(reader, EdgeNGramTokenizer.Side.BACK, 2, 15);
+Tokenizer tokenizer = new EdgeNGramTokenizer(reader, EdgeNGramTokenizer.Side.BACK, 2, 4);
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER, 20, false, false);
-checkRandomData(random(), b, 200*RANDOM_MULTIPLIER, 8192, false, false);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER, 20, false, false);
+checkRandomData(random(), b, 100*RANDOM_MULTIPLIER, 8192, false, false);
 }
 }

@@ -129,10 +129,10 @@ public class NGramTokenFilterTest extends BaseTokenStreamTestCase {
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
 Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
 return new TokenStreamComponents(tokenizer,
-new NGramTokenFilter(tokenizer, 2, 15));
+new NGramTokenFilter(tokenizer, 2, 4));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER, 20, false, false);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
 }
 public void testEmptyTerm() throws Exception {

@@ -29,7 +29,6 @@ import org.apache.lucene.util.LuceneTestCase.Slow;
 /**
 * Tests {@link NGramTokenizer} for correctness.
 */
-@Slow
 public class NGramTokenizerTest extends BaseTokenStreamTestCase {
 private StringReader input;
@@ -100,11 +99,11 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {
 Analyzer a = new Analyzer() {
 @Override
 protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-Tokenizer tokenizer = new NGramTokenizer(reader, 2, 15);
+Tokenizer tokenizer = new NGramTokenizer(reader, 2, 4);
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER, 20, false, false);
-checkRandomData(random(), a, 200*RANDOM_MULTIPLIER, 8192, false, false);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER, 20, false, false);
+checkRandomData(random(), a, 50*RANDOM_MULTIPLIER, 1027, false, false);
 }
 }

@@ -161,7 +161,7 @@ public class TestDutchStemmer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new DutchAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new DutchAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestNorwegianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new NorwegianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new NorwegianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,7 +51,7 @@ public class TestNorwegianLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
 Random random = random();
-checkRandomData(random, analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,7 +51,7 @@ public class TestNorwegianMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
 Random random = random();
-checkRandomData(random, analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -208,7 +208,7 @@ public class TestPathHierarchyTokenizer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
@@ -221,6 +221,6 @@ public class TestPathHierarchyTokenizer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 1027);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 1027);
 }
 }

@@ -169,7 +169,7 @@ public class TestReversePathHierarchyTokenizer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
@@ -182,6 +182,6 @@ public class TestReversePathHierarchyTokenizer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 1027);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 1027);
 }
 }

@@ -93,7 +93,7 @@ public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, filter);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 Analyzer b = new Analyzer() {
 @Override
@@ -103,7 +103,7 @@ public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, filter);
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -138,7 +138,7 @@ public class TestPatternTokenizer extends BaseTokenStreamTestCase
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 Analyzer b = new Analyzer() {
 @Override
@@ -152,6 +152,6 @@ public class TestPatternTokenizer extends BaseTokenStreamTestCase
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), b, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), b, 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -51,6 +51,6 @@ public class TestPortugueseAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new PortugueseAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new PortugueseAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -95,7 +95,7 @@ public class TestPortugueseLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -69,7 +69,7 @@ public class TestPortugueseMinimalStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -69,7 +69,7 @@ public class TestPortugueseStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -102,7 +102,7 @@ public class TestReverseStringFilter extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new ReverseStringFilter(TEST_VERSION_CURRENT, tokenizer));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestRomanianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new RomanianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new RomanianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -57,6 +57,6 @@ public class TestRussianAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new RussianAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new RussianAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -48,7 +48,7 @@ public class TestRussianLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -1144,7 +1144,7 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
@@ -1157,7 +1157,7 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
 }
 };
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestSwedishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new SwedishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new SwedishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -48,7 +48,7 @@ public class TestSwedishLightStemFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -42,7 +42,6 @@ import org.apache.lucene.util.CharsRef;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.LuceneTestCase.Slow;
-@Slow
 public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 private SynonymMap.Builder b;
@@ -429,7 +428,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 * does verify it doesnt throw exceptions, or that the stream doesn't misbehave
 */
 public void testRandom2() throws Exception {
-final int numIters = atLeast(10);
+final int numIters = atLeast(3);
 for (int i = 0; i < numIters; i++) {
 b = new SynonymMap.Builder(random().nextBoolean());
 final int numEntries = atLeast(10);
@@ -447,7 +446,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random(), analyzer, 200);
+checkRandomData(random(), analyzer, 100);
 }
 }
@@ -484,7 +483,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 // Adds MockGraphTokenFilter after SynFilter:
 public void testRandom2GraphAfter() throws Exception {
-final int numIters = atLeast(10);
+final int numIters = atLeast(3);
 Random random = random();
 for (int i = 0; i < numIters; i++) {
 b = new SynonymMap.Builder(random.nextBoolean());
@@ -505,7 +504,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random, analyzer, 200);
+checkRandomData(random, analyzer, 100);
 }
 }
@@ -533,11 +532,11 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 }
 }
-/** simple random test like testRandom2, but for large docs
+/** simple random test like testRandom2, but for larger docs
 */
 public void testRandomHuge() throws Exception {
 Random random = random();
-final int numIters = atLeast(10);
+final int numIters = atLeast(3);
 for (int i = 0; i < numIters; i++) {
 b = new SynonymMap.Builder(random.nextBoolean());
 final int numEntries = atLeast(10);
@@ -555,7 +554,7 @@ public class TestSynonymMapFilter extends BaseTokenStreamTestCase {
 }
 };
-checkRandomData(random, analyzer, 100, 8192);
+checkRandomData(random, analyzer, 100, 1024);
 }
 }

@@ -109,13 +109,13 @@ public class TestThaiAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new ThaiAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new ThaiAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new ThaiAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new ThaiAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 // LUCENE-3044

@@ -51,6 +51,6 @@ public class TestTurkishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new TurkishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new TurkishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }

@@ -119,7 +119,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-int num = 10000 * RANDOM_MULTIPLIER;
+int num = 1000 * RANDOM_MULTIPLIER;
 for (int i = 0; i < num; i++) {
 String s = _TestUtil.randomUnicodeString(random());
 TokenStream ts = analyzer.tokenStream("foo", new StringReader(s));
@@ -157,7 +157,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-int num = 10000 * RANDOM_MULTIPLIER;
+int num = 1000 * RANDOM_MULTIPLIER;
 for (int i = 0; i < num; i++) {
 String s = _TestUtil.randomUnicodeString(random());
 TokenStream ts = analyzer.tokenStream("foo", new StringReader(s));

@@ -184,7 +184,7 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
@@ -198,6 +198,6 @@ public class WikipediaTokenizerTest extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, tokenizer);
 }
 };
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -76,7 +76,7 @@ public class TestICUFoldingFilter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -76,7 +76,7 @@ public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -235,12 +235,12 @@ public class TestICUTokenizer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -51,7 +51,7 @@ public class TestExtendedMode extends BaseTokenStreamTestCase {
 /** random test ensuring we don't ever split supplementaries */
 public void testSurrogates2() throws IOException {
-int numIterations = atLeast(10000);
+int numIterations = atLeast(1000);
 for (int i = 0; i < numIterations; i++) {
 String s = _TestUtil.randomUnicodeString(random(), 100);
 TokenStream ts = analyzer.tokenStream("foo", new StringReader(s));
@@ -66,12 +66,12 @@ public class TestExtendedMode extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
 Random random = random();
-checkRandomData(random, analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, analyzer, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192);
 }
 }

@@ -140,7 +140,7 @@ public class TestJapaneseAnalyzer extends BaseTokenStreamTestCase {
 final Analyzer a = new JapaneseAnalyzer(TEST_VERSION_CURRENT, null, Mode.SEARCH,
 JapaneseAnalyzer.getDefaultStopSet(),
 JapaneseAnalyzer.getDefaultStopTags());
-checkRandomData(random, a, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192);
 }
 // Copied from TestJapaneseTokenizer, to make sure passing

@@ -46,7 +46,7 @@ public class TestJapaneseBaseFormFilter extends BaseTokenStreamTestCase {
 }
 public void testRandomStrings() throws IOException {
-checkRandomData(random(), analyzer, atLeast(10000));
+checkRandomData(random(), analyzer, atLeast(1000));
 }
 public void testEmptyTerm() throws IOException {

@@ -67,7 +67,7 @@ public class TestJapaneseKatakanaStemFilter extends BaseTokenStreamTestCase {
 }
 public void testRandomData() throws IOException {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
 }
 public void testEmptyTerm() throws IOException {

@@ -185,15 +185,15 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), analyzer, 10000*RANDOM_MULTIPLIER);
-checkRandomData(random(), analyzerNoPunct, 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+checkRandomData(random(), analyzerNoPunct, 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, analyzer, 200*RANDOM_MULTIPLIER, 8192);
-checkRandomData(random, analyzerNoPunct, 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, analyzerNoPunct, 100*RANDOM_MULTIPLIER, 8192);
 }
 public void testRandomHugeStringsMockGraphAfter() throws Exception {
@@ -208,7 +208,7 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
 return new TokenStreamComponents(tokenizer, graph);
 }
 },
-200*RANDOM_MULTIPLIER, 8192);
+100*RANDOM_MULTIPLIER, 8192);
 }
 public void testLargeDocReliability() throws Exception {

@@ -131,6 +131,6 @@ public class TestMorfologikAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandom() throws Exception {
-checkRandomData(random(), getTestAnalyzer(), 10000 * RANDOM_MULTIPLIER);
+checkRandomData(random(), getTestAnalyzer(), 1000 * RANDOM_MULTIPLIER);
 }
 }

@@ -224,13 +224,13 @@ public class TestSmartChineseAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new SmartChineseAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new SmartChineseAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 /** blast some random large strings through the analyzer */
 public void testRandomHugeStrings() throws Exception {
 Random random = random();
-checkRandomData(random, new SmartChineseAnalyzer(TEST_VERSION_CURRENT), 200*RANDOM_MULTIPLIER, 8192);
+checkRandomData(random, new SmartChineseAnalyzer(TEST_VERSION_CURRENT), 100*RANDOM_MULTIPLIER, 8192);
 }
 public void testEmptyTerm() throws IOException {

@@ -51,6 +51,6 @@ public class TestPolishAnalyzer extends BaseTokenStreamTestCase {
 /** blast some random strings through the analyzer */
 public void testRandomStrings() throws Exception {
-checkRandomData(random(), new PolishAnalyzer(TEST_VERSION_CURRENT), 10000*RANDOM_MULTIPLIER);
+checkRandomData(random(), new PolishAnalyzer(TEST_VERSION_CURRENT), 1000*RANDOM_MULTIPLIER);
 }
 }