mirror of https://github.com/apache/lucene.git
more analysis test speedups
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1642006 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent c2c106befa
commit 3a7dee2f16
@@ -77,7 +77,7 @@ public class TestFactories extends BaseTokenStreamTestCase {

       // beast it just a little, it shouldnt throw exceptions:
       // (it should have thrown them in initialize)
-      checkRandomData(random(), new FactoryAnalyzer(factory, null, null), 100, 20, false, false);
+      checkRandomData(random(), new FactoryAnalyzer(factory, null, null), 20, 20, false, false);
     }
   }

@@ -97,7 +97,7 @@ public class TestFactories extends BaseTokenStreamTestCase {

       // beast it just a little, it shouldnt throw exceptions:
       // (it should have thrown them in initialize)
-      checkRandomData(random(), new FactoryAnalyzer(assertingTokenizer, factory, null), 100, 20, false, false);
+      checkRandomData(random(), new FactoryAnalyzer(assertingTokenizer, factory, null), 20, 20, false, false);
     }
   }

@@ -117,7 +117,7 @@ public class TestFactories extends BaseTokenStreamTestCase {

       // beast it just a little, it shouldnt throw exceptions:
       // (it should have thrown them in initialize)
-      checkRandomData(random(), new FactoryAnalyzer(assertingTokenizer, null, factory), 100, 20, false, false);
+      checkRandomData(random(), new FactoryAnalyzer(assertingTokenizer, null, factory), 20, 20, false, false);
     }
   }

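All three TestFactories hunks pull the same lever: the third argument to BaseTokenStreamTestCase.checkRandomData is the iteration count, so dropping it from 100 to 20 cuts the randomized "beasting" roughly fivefold per factory while keeping the same shape of coverage. A minimal sketch of the idiom, not from this commit — the class name is hypothetical and MockAnalyzer stands in for the FactoryAnalyzer wrapper the real tests build:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.BaseTokenStreamTestCase;
    import org.apache.lucene.analysis.MockAnalyzer;

    public class MyRandomDataTest extends BaseTokenStreamTestCase {
      public void testRandomStrings() throws Exception {
        // stand-in for the analyzer under test
        Analyzer a = new MockAnalyzer(random());
        // args as in the hunks above: random source, analyzer, iterations,
        // max word length, then two booleans; runtime scales with the
        // iteration count, so 20 instead of 100 is roughly a 5x speedup
        checkRandomData(random(), a, 20, 20, false, false);
      }
    }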
@@ -99,7 +99,8 @@ public class EdgeNGramTokenizerTest extends BaseTokenStreamTestCase {

   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    for (int i = 0; i < 10; i++) {
+    int numIters = TEST_NIGHTLY ? 10 : 1;
+    for (int i = 0; i < numIters; i++) {
       final int min = TestUtil.nextInt(random(), 2, 10);
       final int max = TestUtil.nextInt(random(), min, 20);

@@ -110,7 +110,8 @@ public class NGramTokenizerTest extends BaseTokenStreamTestCase {

   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    for (int i = 0; i < 10; i++) {
+    int numIters = TEST_NIGHTLY ? 10 : 1;
+    for (int i = 0; i < numIters; i++) {
       final int min = TestUtil.nextInt(random(), 2, 10);
       final int max = TestUtil.nextInt(random(), min, 20);
       Analyzer a = new Analyzer() {
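The two n-gram hunks use a different knob: LuceneTestCase.TEST_NIGHTLY, which is true only when the runner is invoked in nightly mode (-Dtests.nightly=true). The full 10-round loop survives in nightly builds while a default local run does a single round. A sketch of the gating pattern, with a hypothetical class name and test body:

    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util.TestUtil;

    public class MyNightlyGatedTest extends LuceneTestCase {
      public void testBeasting() throws Exception {
        // full 10 rounds only under -Dtests.nightly=true; one round by default
        int numIters = TEST_NIGHTLY ? 10 : 1;
        for (int i = 0; i < numIters; i++) {
          final int min = TestUtil.nextInt(random(), 2, 10);
          final int max = TestUtil.nextInt(random(), min, 20);
          // ... build a tokenizer for this round's [min, max] and beast it ...
        }
      }
    }

Gating the count rather than hardcoding 1 keeps the deep coverage; it just moves it to the builds that can afford it.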
@@ -130,6 +130,6 @@ public class TestSnowball extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(t, new SnowballFilter(t, snowballLanguage));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
   }
 }
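In TestSnowball the iteration count is already scaled by LuceneTestCase.RANDOM_MULTIPLIER (driven by -Dtests.multiplier, default 1), so the commit only lowers the baseline from 1000 to 100; heavier CI configurations can still dial the volume back up. A minimal sketch, class name hypothetical and MockAnalyzer standing in for the Snowball chain:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.BaseTokenStreamTestCase;
    import org.apache.lucene.analysis.MockAnalyzer;

    public class MyMultiplierTest extends BaseTokenStreamTestCase {
      public void testRandomStrings() throws Exception {
        Analyzer a = new MockAnalyzer(random()); // stand-in analyzer
        // baseline of 100 iterations; running with -Dtests.multiplier=10
        // restores the pre-commit volume of 1000
        checkRandomData(random(), a, 100 * RANDOM_MULTIPLIER);
      }
    }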
@@ -24,12 +24,14 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.LuceneTestCase.Slow;

 import static org.apache.lucene.analysis.VocabularyAssert.*;

 /**
  * Test the snowball filters against the snowball data tests
  */
+@Slow
 public class TestSnowballVocab extends LuceneTestCase {
   /**
    * Run all languages against their snowball vocabulary tests.
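Finally, the Snowball vocabulary tests are moved out of the fast path altogether by annotating the whole class with LuceneTestCase.Slow, so the runner's slow-test filter (e.g. -Dtests.slow=false in the Lucene build, an assumption about the flag's exact spelling) can exclude them without deleting the coverage. A minimal sketch of the annotation in use, class name hypothetical:

    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util.LuceneTestCase.Slow;

    // the whole class is skipped when the runner filters out slow tests
    @Slow
    public class MyHeavyVocabTest extends LuceneTestCase {
      public void testVocabulary() throws Exception {
        // ... expensive vocabulary-file assertions ...
      }
    }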