remove @Slow annotation, tune test iterations instead

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1642021 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2014-11-27 03:56:35 +00:00
parent 6abb23b22b
commit c377fb5a69
3 changed files with 10 additions and 15 deletions

View File

@@ -28,9 +28,7 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.LuceneTestCase.Slow;
@Slow
public class TestExtendedMode extends BaseTokenStreamTestCase { public class TestExtendedMode extends BaseTokenStreamTestCase {
private final Analyzer analyzer = new Analyzer() { private final Analyzer analyzer = new Analyzer() {
@@ -66,12 +64,12 @@ public class TestExtendedMode extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */ /** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception { public void testRandomStrings() throws Exception {
Random random = random(); Random random = random();
checkRandomData(random, analyzer, 1000*RANDOM_MULTIPLIER); checkRandomData(random, analyzer, 500*RANDOM_MULTIPLIER);
} }
/** blast some random large strings through the analyzer */ /** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception { public void testRandomHugeStrings() throws Exception {
Random random = random(); Random random = random();
checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192); checkRandomData(random, analyzer, 30*RANDOM_MULTIPLIER, 8192);
} }
} }

View File

@@ -23,12 +23,10 @@ import java.util.Random;
import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.BaseTokenStreamTestCase; import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode;
import org.apache.lucene.util.LuceneTestCase.Slow;
/** /**
* Test Kuromoji Japanese morphological analyzer * Test Kuromoji Japanese morphological analyzer
*/ */
@Slow
public class TestJapaneseAnalyzer extends BaseTokenStreamTestCase { public class TestJapaneseAnalyzer extends BaseTokenStreamTestCase {
/** This test fails with NPE when the /** This test fails with NPE when the
* stopwords file is missing in classpath */ * stopwords file is missing in classpath */
@@ -130,7 +128,7 @@ public class TestJapaneseAnalyzer extends BaseTokenStreamTestCase {
final Analyzer a = new JapaneseAnalyzer(null, Mode.SEARCH, final Analyzer a = new JapaneseAnalyzer(null, Mode.SEARCH,
JapaneseAnalyzer.getDefaultStopSet(), JapaneseAnalyzer.getDefaultStopSet(),
JapaneseAnalyzer.getDefaultStopTags()); JapaneseAnalyzer.getDefaultStopTags());
checkRandomData(random, a, atLeast(10000)); checkRandomData(random, a, atLeast(1000));
} }
/** blast some random large strings through the analyzer */ /** blast some random large strings through the analyzer */
@@ -139,7 +137,7 @@ public class TestJapaneseAnalyzer extends BaseTokenStreamTestCase {
final Analyzer a = new JapaneseAnalyzer(null, Mode.SEARCH, final Analyzer a = new JapaneseAnalyzer(null, Mode.SEARCH,
JapaneseAnalyzer.getDefaultStopSet(), JapaneseAnalyzer.getDefaultStopSet(),
JapaneseAnalyzer.getDefaultStopTags()); JapaneseAnalyzer.getDefaultStopTags());
checkRandomData(random, a, 100*RANDOM_MULTIPLIER, 8192); checkRandomData(random, a, 2*RANDOM_MULTIPLIER, 8192);
} }
// Copied from TestJapaneseTokenizer, to make sure passing // Copied from TestJapaneseTokenizer, to make sure passing

View File

@@ -39,7 +39,6 @@ import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.LuceneTestCase.Slow; import org.apache.lucene.util.LuceneTestCase.Slow;
@Slow
public class TestJapaneseTokenizer extends BaseTokenStreamTestCase { public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
public static UserDictionary readDict() { public static UserDictionary readDict() {
@@ -184,15 +183,15 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
/** blast some random strings through the analyzer */ /** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception { public void testRandomStrings() throws Exception {
checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER); checkRandomData(random(), analyzer, 500*RANDOM_MULTIPLIER);
checkRandomData(random(), analyzerNoPunct, 1000*RANDOM_MULTIPLIER); checkRandomData(random(), analyzerNoPunct, 500*RANDOM_MULTIPLIER);
} }
/** blast some random large strings through the analyzer */ /** blast some random large strings through the analyzer */
public void testRandomHugeStrings() throws Exception { public void testRandomHugeStrings() throws Exception {
Random random = random(); Random random = random();
checkRandomData(random, analyzer, 100*RANDOM_MULTIPLIER, 8192); checkRandomData(random, analyzer, 20*RANDOM_MULTIPLIER, 8192);
checkRandomData(random, analyzerNoPunct, 100*RANDOM_MULTIPLIER, 8192); checkRandomData(random, analyzerNoPunct, 20*RANDOM_MULTIPLIER, 8192);
} }
public void testRandomHugeStringsMockGraphAfter() throws Exception { public void testRandomHugeStringsMockGraphAfter() throws Exception {
@@ -207,11 +206,11 @@ public class TestJapaneseTokenizer extends BaseTokenStreamTestCase {
return new TokenStreamComponents(tokenizer, graph); return new TokenStreamComponents(tokenizer, graph);
} }
}, },
100*RANDOM_MULTIPLIER, 8192); 20*RANDOM_MULTIPLIER, 8192);
} }
public void testLargeDocReliability() throws Exception { public void testLargeDocReliability() throws Exception {
for (int i = 0; i < 100; i++) { for (int i = 0; i < 10; i++) {
String s = TestUtil.randomUnicodeString(random(), 10000); String s = TestUtil.randomUnicodeString(random(), 10000);
try (TokenStream ts = analyzer.tokenStream("foo", s)) { try (TokenStream ts = analyzer.tokenStream("foo", s)) {
ts.reset(); ts.reset();