mirror of https://github.com/apache/lucene.git
LUCENE-9163: test speedup for slowest/pathological tests
Calming down individual test methods with double-digit execution times after running tests many times. There are a few more issues remaining, but this solves the majority of them.
parent 6b3e7feba1
commit 1051db4038
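The diff below applies the same two moves over and over: shrink the iteration count a test feeds to checkRandomData (keeping it proportional to RANDOM_MULTIPLIER so -Dtests.multiplier can scale it back up), and move the genuinely heavy variants behind @Nightly or @Slow so they only run in nightly builds (-Dtests.nightly=true). A minimal sketch of that pattern, with a hypothetical MyAnalyzer standing in for the many analyzers touched below:

    import org.apache.lucene.analysis.Analyzer;

    public class TestMyAnalyzer extends BaseTokenStreamTestCase {

      /** default run: 5x fewer random strings than before */
      public void testRandomStrings() throws Exception {
        Analyzer a = new MyAnalyzer(); // hypothetical analyzer under test
        checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER); // was 1000 * RANDOM_MULTIPLIER
        a.close();
      }

      /** heavy variant, only runs with -Dtests.nightly=true */
      @Nightly
      public void testRandomHugeStringsAtNight() throws Exception {
        Analyzer a = new MyAnalyzer();
        checkRandomData(random(), a, 3 * RANDOM_MULTIPLIER, 8192);
        a.close();
      }
    }
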
@@ -112,7 +112,7 @@ public class TestArabicAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     ArabicAnalyzer a = new ArabicAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200*RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -81,7 +81,7 @@ public class TestBulgarianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     BulgarianAnalyzer a = new BulgarianAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -168,7 +168,7 @@ public class TestBrazilianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     BrazilianAnalyzer a = new BrazilianAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -61,7 +61,7 @@ public class TestCatalanAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     CatalanAnalyzer a = new CatalanAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -286,7 +286,7 @@ public class TestCJKAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new CJKAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -76,7 +76,7 @@ public class TestSoraniAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new SoraniAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -334,7 +334,7 @@ public class CommonGramsFilterTest extends BaseTokenStreamTestCase {
       }
     };
 
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -375,7 +375,7 @@ public class TestCompoundWordTokenFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new DictionaryCompoundWordTokenFilter(tokenizer, dict));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     InputSource is = new InputSource(getClass().getResource("da_UTF8.xml").toExternalForm());

@@ -68,7 +68,7 @@ public class TestGermanAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     GermanAnalyzer a = new GermanAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -69,7 +69,7 @@ public class GreekAnalyzerTest extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new GreekAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -58,7 +58,7 @@ public class TestEnglishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new EnglishAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -53,7 +53,7 @@ public class TestKStemmer extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   /**

@@ -74,7 +74,7 @@ public class TestPorterStemFilter extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -54,7 +54,7 @@ public class TestSpanishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new SpanishAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestBasqueAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new BasqueAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -240,7 +240,7 @@ public class TestPersianAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     PersianAnalyzer a = new PersianAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -54,7 +54,7 @@ public class TestFinnishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new FinnishAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -168,7 +168,7 @@ public class TestFrenchAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new FrenchAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -71,7 +71,7 @@ public class TestIrishAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer a = new IrishAnalyzer();
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -80,7 +80,7 @@ public class TestGalicianMinimalStemFilter extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -1945,7 +1945,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
             new ASCIIFoldingFilter(tokenizer, random().nextBoolean()));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -135,7 +135,7 @@ public class TestCapitalizationFilter extends BaseTokenStreamTestCase {
       }
     };
 
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -75,7 +75,7 @@ public class TestHyphenatedWordsFilter extends BaseTokenStreamTestCase {
       }
     };
 
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -64,7 +64,7 @@ public class TestKeepWordFilter extends BaseTokenStreamTestCase {
       }
     };
 
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -58,7 +58,7 @@ public class TestTrimFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new TrimFilter(tokenizer));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -895,7 +895,7 @@ public class TestWordDelimiterGraphFilter extends BaseTokenStreamTestCase {
   }
 
   public void testRandomPaths() throws Exception {
-    int iters = atLeast(100);
+    int iters = atLeast(10);
     for(int iter=0;iter<iters;iter++) {
       String text = randomWDFText();
       if (VERBOSE) {

@@ -88,7 +88,7 @@ public class TestPatternReplaceFilter extends BaseTokenStreamTestCase {
        return new TokenStreamComponents(tokenizer, filter);
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -132,7 +132,7 @@ public class TestPatternTokenizer extends BaseTokenStreamTestCase
         return new TokenStreamComponents(tokenizer);
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -257,7 +257,7 @@ public class TestSimplePatternSplitTokenizer extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer);
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -202,7 +202,7 @@ public class TestSimplePatternTokenizer extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer);
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -99,7 +99,7 @@ public class TestReverseStringFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -1111,7 +1111,7 @@ public class ShingleFilterTest extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new ShingleFilter(tokenizer));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -132,7 +132,7 @@ public class TestSnowball extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(t, new SnowballFilter(t, snowballLanguage));
       }
     };
-    checkRandomData(random(), a, 100*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 20 * RANDOM_MULTIPLIER);
     a.close();
   }
 }

@@ -354,7 +354,7 @@ public class TestUAX29URLEmailAnalyzer extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testMaxTokenLengthDefault() throws Exception {

@@ -621,7 +621,7 @@ public class TestUAX29URLEmailTokenizer extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   /** blast some random large strings through the analyzer */

@@ -91,7 +91,7 @@ public class TestICUFoldingFilter extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -188,7 +188,7 @@ public class TestICUNormalizer2CharFilter extends BaseTokenStreamTestCase {
         return new ICUNormalizer2CharFilter(reader, Normalizer2.getInstance(null, "nfkc_cf", Normalizer2.Mode.COMPOSE));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     // huge strings
     checkRandomData(random(), a, 25*RANDOM_MULTIPLIER, 8192);
     a.close();

@@ -205,7 +205,7 @@ public class TestICUNormalizer2CharFilter extends BaseTokenStreamTestCase {
         return new ICUNormalizer2CharFilter(reader, Normalizer2.getInstance(null, "nfkc", Normalizer2.Mode.DECOMPOSE));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     // huge strings
     checkRandomData(random(), a, 25*RANDOM_MULTIPLIER, 8192);
     a.close();

@@ -91,7 +91,7 @@ public class TestICUNormalizer2Filter extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -103,7 +103,7 @@ public class TestICUTransformFilter extends BaseTokenStreamTestCase {
         return new TokenStreamComponents(tokenizer, new ICUTransformFilter(tokenizer, transform));
       }
     };
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
   }
 

@@ -333,7 +333,7 @@ public class TestICUTokenizer extends BaseTokenStreamTestCase {
 
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
   }
 
   /** blast some random large strings through the analyzer */

@@ -76,7 +76,7 @@ public class TestJapaneseBaseFormFilter extends BaseTokenStreamTestCase {
   }
 
   public void testRandomStrings() throws IOException {
-    checkRandomData(random(), analyzer, atLeast(1000));
+    checkRandomData(random(), analyzer, atLeast(200));
   }
 
   public void testEmptyTerm() throws IOException {

@@ -256,7 +256,12 @@ public class TestJapaneseNumberFilter extends BaseTokenStreamTestCase {
 
   @Test
   public void testRandomHugeStrings() throws Exception {
-    checkRandomData(random(), analyzer, 5 * RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random(), analyzer, RANDOM_MULTIPLIER, 4096);
+  }
+
+  @Test @Nightly
+  public void testRandomHugeStringsAtNight() throws Exception {
+    checkRandomData(random(), analyzer, 3 * RANDOM_MULTIPLIER, 8192);
   }
 
   @Test

@@ -103,8 +103,8 @@ public class TestJapaneseReadingFormFilter extends BaseTokenStreamTestCase {
 
   public void testRandomData() throws IOException {
     Random random = random();
-    checkRandomData(random, katakanaAnalyzer, 1000*RANDOM_MULTIPLIER);
-    checkRandomData(random, romajiAnalyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random, katakanaAnalyzer, 200*RANDOM_MULTIPLIER);
+    checkRandomData(random, romajiAnalyzer, 200*RANDOM_MULTIPLIER);
   }
 
   public void testEmptyTerm() throws IOException {

@@ -320,7 +320,16 @@ public class
   }
 
   /** blast some random large strings through the analyzer */
+  @Slow
   public void testRandomHugeStrings() throws Exception {
+    Random random = random();
+    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
+    checkRandomData(random, analyzerNoPunct, RANDOM_MULTIPLIER, 4096);
+    checkRandomData(random, analyzerNormalNBest, RANDOM_MULTIPLIER, 4096);
+  }
+
+  @Nightly
+  public void testRandomHugeStringsAtNight() throws Exception {
     Random random = random();
     checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
     checkRandomData(random, analyzerNoPunct, 3*RANDOM_MULTIPLIER, 8192);

@@ -328,6 +337,22 @@ public class
   }
 
   public void testRandomHugeStringsMockGraphAfter() throws Exception {
+    // Randomly inject graph tokens after JapaneseTokenizer:
+    Random random = random();
+    Analyzer analyzer = new Analyzer() {
+      @Override
+      protected TokenStreamComponents createComponents(String fieldName) {
+        Tokenizer tokenizer = new JapaneseTokenizer(newAttributeFactory(), readDict(), false, Mode.SEARCH);
+        TokenStream graph = new MockGraphTokenFilter(random(), tokenizer);
+        return new TokenStreamComponents(tokenizer, graph);
+      }
+    };
+    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
+    analyzer.close();
+  }
+
+  @Nightly
+  public void testRandomHugeStringsMockGraphAfterAtNight() throws Exception {
     // Randomly inject graph tokens after JapaneseTokenizer:
     Random random = random();
     Analyzer analyzer = new Analyzer() {

@@ -342,6 +367,7 @@ public class
     analyzer.close();
   }
 
+
   public void testLargeDocReliability() throws Exception {
     for (int i = 0; i < 10; i++) {
       String s = TestUtil.randomUnicodeString(random(), 10000);

@@ -80,7 +80,7 @@ public class TestKoreanAnalyzer extends BaseTokenStreamTestCase {
   public void testRandom() throws IOException {
     Random random = random();
     final Analyzer a = new KoreanAnalyzer();
-    checkRandomData(random, a, atLeast(1000));
+    checkRandomData(random, a, atLeast(200));
     a.close();
   }
 

|
@ -90,7 +90,15 @@ public class TestKoreanAnalyzer extends BaseTokenStreamTestCase {
|
||||||
public void testRandomHugeStrings() throws Exception {
|
public void testRandomHugeStrings() throws Exception {
|
||||||
Random random = random();
|
Random random = random();
|
||||||
final Analyzer a = new KoreanAnalyzer();
|
final Analyzer a = new KoreanAnalyzer();
|
||||||
checkRandomData(random, a, 2 * RANDOM_MULTIPLIER, 8192);
|
checkRandomData(random, a, RANDOM_MULTIPLIER, 4096);
|
||||||
|
a.close();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Nightly
|
||||||
|
public void testRandomHugeStringsAtNight() throws Exception {
|
||||||
|
Random random = random();
|
||||||
|
final Analyzer a = new KoreanAnalyzer();
|
||||||
|
checkRandomData(random, a, 3 * RANDOM_MULTIPLIER, 8192);
|
||||||
a.close();
|
a.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -274,6 +274,11 @@ public class TestKoreanNumberFilter extends BaseTokenStreamTestCase {
 
   @Test
   public void testRandomHugeStrings() throws Exception {
+    checkRandomData(random(), analyzer, RANDOM_MULTIPLIER, 4096);
+  }
+
+  @Test @Nightly
+  public void testRandomHugeStringsAtNight() throws Exception {
     checkRandomData(random(), analyzer, 5 * RANDOM_MULTIPLIER, 8192);
   }
 

@@ -375,6 +375,14 @@ public class TestKoreanTokenizer extends BaseTokenStreamTestCase {
 
   /** blast some random large strings through the tokenizer */
   public void testRandomHugeStrings() throws Exception {
+    Random random = random();
+    checkRandomData(random, analyzer, RANDOM_MULTIPLIER, 4096);
+    checkRandomData(random, analyzerUnigram, RANDOM_MULTIPLIER, 4096);
+    checkRandomData(random, analyzerDecompound, RANDOM_MULTIPLIER, 4096);
+  }
+
+  @Nightly
+  public void testRandomHugeStringsAtNight() throws Exception {
     Random random = random();
     checkRandomData(random, analyzer, 3*RANDOM_MULTIPLIER, 8192);
     checkRandomData(random, analyzerUnigram, 3*RANDOM_MULTIPLIER, 8192);

@@ -86,6 +86,7 @@ public class TokenInfoDictionaryTest extends LuceneTestCase {
   }
 
   /** enumerates the entire FST/lookup data and just does basic sanity checks */
+  @Slow
   public void testEnumerateAll() throws Exception {
     // just for debugging
     int numTerms = 0;

@@ -94,7 +94,7 @@ public class TestPhoneticFilter extends BaseTokenStreamTestCase {
       }
     };
 
-    checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), a, 200 * RANDOM_MULTIPLIER);
     a.close();
 
     Analyzer b = new Analyzer() {

@@ -255,14 +255,14 @@ public class TestSmartChineseAnalyzer extends BaseTokenStreamTestCase {
   /** blast some random strings through the analyzer */
   public void testRandomStrings() throws Exception {
     Analyzer analyzer = new SmartChineseAnalyzer();
-    checkRandomData(random(), analyzer, 1000*RANDOM_MULTIPLIER);
+    checkRandomData(random(), analyzer, 200 * RANDOM_MULTIPLIER);
     analyzer.close();
   }
 
   /** blast some random large strings through the analyzer */
   public void testRandomHugeStrings() throws Exception {
     Analyzer analyzer = new SmartChineseAnalyzer();
-    checkRandomData(random(), analyzer, 100*RANDOM_MULTIPLIER, 8192);
+    checkRandomData(random(), analyzer, 3 * RANDOM_MULTIPLIER, 8192);
     analyzer.close();
   }
 }

@@ -91,10 +91,11 @@ public class BM25NBClassifierTest extends ClassificationTestBase<BytesRef> {
     }
   }
 
-  @Test
+  @Test @Slow
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       BM25NBClassifier classifier = new BM25NBClassifier(leafReader,
           analyzer, null, categoryFieldName, textFieldName);

@@ -84,7 +84,8 @@ public class BooleanPerceptronClassifierTest extends ClassificationTestBase<Bool
   @Test
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       BooleanPerceptronClassifier classifier = new BooleanPerceptronClassifier(leafReader, analyzer, null, 1, null, booleanFieldName, textFieldName);
 

@@ -93,7 +93,8 @@ public class CachingNaiveBayesClassifierTest extends ClassificationTestBase<Byte
   @Test
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       CachingNaiveBayesClassifier simpleNaiveBayesClassifier = new CachingNaiveBayesClassifier(leafReader,
           analyzer, null, categoryFieldName, textFieldName);

@@ -67,7 +67,8 @@ public class KNearestFuzzyClassifierTest extends ClassificationTestBase<BytesRef
   @Test
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       Classifier<BytesRef> classifier = new KNearestFuzzyClassifier(leafReader, null, analyzer, null, 3, categoryFieldName, textFieldName);
 

@@ -123,7 +123,8 @@ public class KNearestNeighborClassifierTest extends ClassificationTestBase<Bytes
   @Test
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       KNearestNeighborClassifier kNearestNeighborClassifier = new KNearestNeighborClassifier(leafReader, null,
           analyzer, null, 1, 1, 1, categoryFieldName, textFieldName);

@@ -96,7 +96,8 @@ public class SimpleNaiveBayesClassifierTest extends ClassificationTestBase<Bytes
   @Test
   public void testPerformance() throws Exception {
     MockAnalyzer analyzer = new MockAnalyzer(random());
-    LeafReader leafReader = getRandomIndex(analyzer, 100);
+    int numDocs = atLeast(10);
+    LeafReader leafReader = getRandomIndex(analyzer, numDocs);
     try {
       SimpleNaiveBayesClassifier simpleNaiveBayesClassifier = new SimpleNaiveBayesClassifier(leafReader,
           analyzer, null, categoryFieldName, textFieldName);

@@ -66,7 +66,8 @@ public class DataSplitterTest extends LuceneTestCase {
 
     Document doc;
     Random rnd = random();
-    for (int i = 0; i < 1000; i++) {
+    int numDocs = atLeast(100);
+    for (int i = 0; i < numDocs; i++) {
       doc = new Document();
       doc.add(new Field(idFieldName, "id" + Integer.toString(i), ft));
       doc.add(new Field(textFieldName, TestUtil.randomUnicodeString(rnd, 1024), ft));

@@ -122,6 +122,8 @@ public class TestIndexedDISI extends LuceneTestCase {
     }
   }
 
+  // TODO: can this be toned down?
+  @Nightly
   public void testRandomBlocks() throws IOException {
     final int BLOCKS = 5;
     FixedBitSet set = createSetWithRandomBlocks(BLOCKS);

@@ -87,7 +87,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedSetVariableLengthBigVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedSetVsStoredFields(atLeast(300), 1, 32766, 16, 100);
+      doTestSortedSetVsStoredFields(atLeast(100), 1, 32766, 16, 100);
     }
   }
 

@@ -103,7 +103,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
   public void testSortedVariableLengthBigVsStoredFields() throws Exception {
     int numIterations = atLeast(1);
     for (int i = 0; i < numIterations; i++) {
-      doTestSortedVsStoredFields(atLeast(300), 1d, 1, 32766);
+      doTestSortedVsStoredFields(atLeast(100), 1d, 1, 32766);
     }
   }
 

@@ -439,7 +439,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
     }
   }
 
-  @Slow
+  @Nightly
   public void testSortedSetAroundBlockSize() throws IOException {
     final int frontier = 1 << Lucene80DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
     for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {

@@ -492,7 +492,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
     }
   }
 
-  @Slow
+  @Nightly
   public void testSortedNumericAroundBlockSize() throws IOException {
     final int frontier = 1 << Lucene80DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT;
     for (int maxDoc = frontier - 1; maxDoc <= frontier + 1; ++maxDoc) {

@@ -544,7 +544,7 @@ public class TestLucene80DocValuesFormat extends BaseCompressingDocValuesFormatT
     doTestSortedNumericBlocksOfVariousBitsPerValue(() -> TestUtil.nextInt(random(), 0, 2));
   }
 
-  @Slow
+  @Nightly
   public void testNumericBlocksOfVariousBitsPerValue() throws Exception {
     doTestSparseNumericBlocksOfVariousBitsPerValue(1);
   }

@@ -70,7 +70,7 @@ public abstract class BaseShapeTestCase extends LuceneTestCase {
 
   // A particularly tricky adversary for BKD tree:
   public void testSameShapeManyTimes() throws Exception {
-    int numShapes = atLeast(50);
+    int numShapes = TEST_NIGHTLY ? atLeast(50) : atLeast(10);
 
     // Every doc has 2 points:
     Object theShape = nextShape();

@@ -223,7 +223,7 @@ public class TestLatLonShape extends LuceneTestCase {
 
   /** test we can search for a point with a large number of vertices*/
   public void testLargeVertexPolygon() throws Exception {
-    int numVertices = TestUtil.nextInt(random(), 200000, 500000);
+    int numVertices = TEST_NIGHTLY ? TestUtil.nextInt(random(), 200000, 500000) : TestUtil.nextInt(random(), 20000, 50000);
     IndexWriterConfig iwc = newIndexWriterConfig();
     iwc.setMergeScheduler(new SerialMergeScheduler());
     int mbd = iwc.getMaxBufferedDocs();

@@ -37,6 +37,17 @@ public class TestTessellator extends LuceneTestCase {
   }
 
   public void testSimpleTessellation() throws Exception {
+    Polygon poly = GeoTestUtil.createRegularPolygon(0.0, 0.0, 100000, 100000);
+    Polygon inner = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
+        new double[]{1.0, -1.0, -0.5, -1.0, 1.0, 0.5, 1.0});
+    Polygon inner2 = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
+        new double[]{-2.0, -4.0, -3.5, -4.0, -2.0, -2.5, -2.0});
+    poly = new Polygon(poly.getPolyLats(), poly.getPolyLons(), inner, inner2);
+    assertTrue(Tessellator.tessellate(poly).size() > 0);
+  }
+
+  @Nightly
+  public void testSimpleTessellationAtNight() throws Exception {
     Polygon poly = GeoTestUtil.createRegularPolygon(0.0, 0.0, 1000000, 1000000);
     Polygon inner = new Polygon(new double[] {-1.0, -1.0, 0.5, 1.0, 1.0, 0.5, -1.0},
         new double[]{1.0, -1.0, -0.5, -1.0, 1.0, 0.5, 1.0});

@@ -486,7 +486,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     doTestOperationsOnDiskFull(false);
   }
 
-  @Slow
+  // TODO: can we tone this test down so it isn't crazy slow?
+  @Nightly
   public void testUpdatesOnDiskFull() throws IOException {
     doTestOperationsOnDiskFull(true);
   }

@@ -749,7 +749,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     };
 
     final int NUM_THREAD = 3;
-    final int NUM_ITER = 100;
+    final int NUM_ITER = atLeast(10);
 
     for(int i=0;i<2;i++) {
       Directory dir = newDirectory();

@@ -1950,6 +1950,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     }
   }
 
+  // TODO: can be super slow in pathological cases (merge config?)
+  @Nightly
   public void testMergeExceptionIsTragic() throws Exception {
     MockDirectoryWrapper dir = newMockDirectory();
     final AtomicBoolean didFail = new AtomicBoolean();

@@ -1522,7 +1522,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(dir, conf);
 
     // test data: lots of documents (few 10Ks) and lots of update terms (few hundreds)
-    final int numDocs = atLeast(20000);
+    final int numDocs = TEST_NIGHTLY ? atLeast(20000) : atLeast(200);
     final int numNumericFields = atLeast(5);
     final int numTerms = TestUtil.nextInt(random, 10, 100); // terms should affect many docs
     Set<String> updateTerms = new HashSet<>();

@@ -1031,7 +1031,7 @@ public class TestPhraseQuery extends LuceneTestCase {
   public void testRandomTopDocs() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
-    int numDocs = atLeast(128 * 8 * 8 * 3); // make sure some terms have skip data
+    int numDocs = TEST_NIGHTLY ? atLeast(128 * 8 * 8 * 3) : atLeast(100); // at night, make sure some terms have skip data
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       int numTerms = random().nextInt(1 << random().nextInt(5));

@@ -67,7 +67,7 @@ public class TestShardSearching extends ShardSearchingTestBase {
   public void testSimple() throws Exception {
     final int numNodes = TestUtil.nextInt(random(), 1, 10);
 
-    final double runTimeSec = atLeast(3);
+    final double runTimeSec = TEST_NIGHTLY ? atLeast(5) : atLeast(1);
 
     final int minDocsToMakeTerms = TestUtil.nextInt(random(), 5, 20);
 

@@ -385,7 +385,7 @@ public class TestSynonymQuery extends LuceneTestCase {
   public void testRandomTopDocs() throws IOException {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
-    int numDocs = atLeast(128 * 8 * 8 * 3); // make sure some terms have skip data
+    int numDocs = TEST_NIGHTLY ? atLeast(128 * 8 * 8 * 3) : atLeast(100); // at night, make sure some terms have skip data
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       int numValues = random().nextInt(1 << random().nextInt(5));

@@ -87,7 +87,7 @@ public class TestDocIdSetBuilder extends LuceneTestCase {
   }
 
   public void testRandom() throws IOException {
-    final int maxDoc = TestUtil.nextInt(random(), 1, 10000000);
+    final int maxDoc = TEST_NIGHTLY ? TestUtil.nextInt(random(), 1, 10000000) : TestUtil.nextInt(random(), 1, 100000);
     for (int i = 1 ; i < maxDoc / 2; i <<=1) {
       final int numDocs = TestUtil.nextInt(random(), 1, i);
       final FixedBitSet docs = new FixedBitSet(maxDoc);

@@ -937,7 +937,7 @@ public class TestPackedInts extends LuceneTestCase {
   }
 
   public void testPackedLongValues() {
-    final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 100000)];
+    final long[] arr = new long[RandomNumbers.randomIntBetween(random(), 1, TEST_NIGHTLY ? 1000000 : 10000)];
     float[] ratioOptions = new float[]{PackedInts.DEFAULT, PackedInts.COMPACT, PackedInts.FAST};
     for (int bpv : new int[]{0, 1, 63, 64, RandomNumbers.randomIntBetween(random(), 2, 62)}) {
       for (DataType dataType : Arrays.asList(DataType.DELTA_PACKED)) {

@@ -83,7 +83,7 @@ public class TestExpressionSorts extends LuceneTestCase {
   }
 
   public void testQueries() throws Exception {
-    int n = atLeast(4);
+    int n = atLeast(1);
     for (int i = 0; i < n; i++) {
       assertQuery(new MatchAllDocsQuery());
       assertQuery(new TermQuery(new Term("english", "one")));

@@ -489,7 +489,7 @@ public class TestDrillSideways extends FacetTestCase {
 
     int numDims = TestUtil.nextInt(random(), 2, 5);
     //int numDims = 3;
-    int numDocs = atLeast(3000);
+    int numDocs = atLeast(300);
     //int numDocs = 20;
     if (VERBOSE) {
       System.out.println(

@@ -226,7 +226,7 @@ public class TestSearcherTaxonomyManager extends FacetTestCase {
     final AtomicBoolean stop = new AtomicBoolean();
 
     // How many unique facets to index before stopping:
-    final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;
+    final int ordLimit = TEST_NIGHTLY ? 100000 : 600;
 
     Thread indexer = new IndexerThread(w, config, tw, mgr, ordLimit, stop);
     indexer.start();

@@ -706,7 +706,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
     final int abOrd = trBase.getOrdinal(abPath);
     final int abYoungChildBase1 = ca1.children()[abOrd];
 
-    final int numCategories = atLeast(800);
+    final int numCategories = atLeast(200);
     for (int i = 0; i < numCategories; i++) {
       twBase.addCategory(new FacetLabel("a", "b", Integer.toString(i)));
     }

@@ -720,7 +720,7 @@ public class TestTaxonomyCombined extends FacetTestCase {
     final ParallelTaxonomyArrays ca2 = trBase.getParallelTaxonomyArrays();
     final int abYoungChildBase2 = ca2.children()[abOrd];
 
-    int numRetries = atLeast(50);
+    int numRetries = atLeast(10);
     for (int retry = 0; retry < numRetries; retry++) {
       assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry, numCategories);
     }

@@ -401,7 +401,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
     FacetsConfig config = new FacetsConfig();
     config.setMultiValued("dim", true);
 
-    int numLabels = TestUtil.nextInt(random(), 40000, 100000);
+    int numLabels = TEST_NIGHTLY ? TestUtil.nextInt(random(), 40000, 100000) : TestUtil.nextInt(random(), 4000, 10000);
 
     Document doc = new Document();
     doc.add(newTextField("field", "text", Field.Store.NO));

@@ -247,7 +247,7 @@ public class TestDirectoryTaxonomyWriter extends FacetTestCase {
   }
 
   public void testConcurrency() throws Exception {
-    final int ncats = atLeast(100000); // add many categories
+    final int ncats = TEST_NIGHTLY ? atLeast(100000) : atLeast(1000); // at night, add many categories
     final int range = ncats * 3; // affects the categories selection
     final AtomicInteger numCats = new AtomicInteger(ncats);
     final Directory dir = newDirectory();

@@ -83,8 +83,6 @@ import static org.hamcrest.CoreMatchers.equalTo;
 public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
   private Set<String> queries = new HashSet<>();
 
-  public static final int ITERATIONS = 100 * RANDOM_MULTIPLIER;
-
   @Override
   public void setUp() throws Exception {
     super.setUp();

@@ -115,7 +113,8 @@ public class TestMemoryIndexAgainstDirectory extends BaseTokenStreamTestCase {
    */
   public void testRandomQueries() throws Exception {
     MemoryIndex index = randomMemoryIndex();
-    for (int i = 0; i < ITERATIONS; i++) {
+    int iterations = TEST_NIGHTLY ? 100 * RANDOM_MULTIPLIER : 10 * RANDOM_MULTIPLIER;
+    for (int i = 0; i < iterations; i++) {
       assertAgainstDirectory(index);
     }
   }

@@ -73,7 +73,7 @@ public class TestCachePurging extends MonitorTestBase {
   }

   public void testConcurrentPurges() throws Exception {
-    int iters = Integer.getInteger("purgeIters", 2);
+    int iters = Integer.getInteger("purgeIters", 1);
     for (int i = 0; i < iters; i++) {
       doConcurrentPurgesAndUpdatesTest();
     }
@@ -157,8 +157,11 @@ public class Geo3dRptTest extends RandomSpatialOpStrategyTestCase {
     final List<Shape> queryShapes = new ArrayList<>();
     while(querySpatialData.hasNext()) {
       queryShapes.add(querySpatialData.next().shape);
-      queryShapes.add(randomQueryShape());
+      if (TEST_NIGHTLY) {
+        queryShapes.add(randomQueryShape());
+      }
     }
+    queryShapes.add(randomQueryShape());
     testOperation(SpatialOperation.Intersects, indexedShapes, queryShapes, random().nextBoolean());
   }
 }
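This Geo3dRptTest change is a subtler variant of the nightly gate: the per-datum random query shape moves inside if (TEST_NIGHTLY), but one unconditional randomQueryShape() call is kept after the loop, so fast runs still exercise the random-shape path at least once instead of losing it entirely. The shape of the refactor, with stand-in names:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Random;

    // Gate per-item extras behind the nightly flag, but keep one unconditional
    // call so default runs still cover the code path.
    final class NightlyLoopSketch {
      static List<Long> buildQueries(List<Long> data, Random random, boolean nightly) {
        List<Long> queries = new ArrayList<>();
        for (Long item : data) {
          queries.add(item);                // always keep the real datum
          if (nightly) {
            queries.add(random.nextLong()); // expensive extra case, nightly only
          }
        }
        queries.add(random.nextLong());     // and always one random query overall
        return queries;
      }
    }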
@@ -115,4 +115,22 @@ public class Geo3dShapeWGS84ModelRectRelationTest extends ShapeRectRelationTestC

     assertEquals(circle.relate(bPoint), SpatialRelation.CONTAINS);
   }
+
+  // very slow, test sources are not all here, no clue how to fix it
+  @Nightly
+  public void testGeoCircleRect() {
+    super.testGeoCircleRect();
+  }
+
+  // very slow, test sources are not all here, no clue how to fix it
+  @Nightly
+  public void testGeoPolygonRect() {
+    super.testGeoPolygonRect();
+  }
+
+  // very slow, test sources are not all here, no clue how to fix it
+  @Nightly
+  public void testGeoPathRect() {
+    super.testGeoPathRect();
+  }
 }
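Here nothing about the slow tests changes except when they run: each is overridden solely to attach @Nightly, the test-group annotation (nested in LuceneTestCase, mirrored in the randomizedtesting framework) that excludes a method unless -Dtests.nightly=true is set. The general pattern, with a stand-in base class:

    import org.apache.lucene.util.LuceneTestCase;

    // SlowBase stands in for a shared test case whose sources live in another module.
    class SlowBase extends LuceneTestCase {
      public void testExpensiveCase() { /* very slow work */ }
    }

    public class NightlyOverrideSketch extends SlowBase {
      @Override
      @Nightly // skipped unless the runner sees -Dtests.nightly=true
      public void testExpensiveCase() {
        super.testExpensiveCase(); // behavior unchanged, only rescheduled
      }
    }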
@@ -46,14 +46,14 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
     super(ctx);
   }

-  //20 times each -- should be plenty
+  //2 times each -- should be plenty

   protected int getContainsMinimum(int laps) {
-    return 20;
+    return 2;
   }

   protected int getIntersectsMinimum(int laps) {
-    return 20;
+    return 2;
   }

   // producing "within" cases in Geo3D based on our random shapes doesn't happen often. It'd be nice to increase this.
@@ -62,11 +62,11 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
   }

   protected int getDisjointMinimum(int laps) {
-    return 20;
+    return 2;
   }

   protected int getBoundingMinimum(int laps) {
-    return 20;
+    return 2;
   }
 }

@@ -117,6 +117,7 @@ public abstract class ShapeRectRelationTestCase extends RandomizedShapeTestCase
     }.testRelateWithRectangle();
   }

+  // very slow, and test sources are not here, so no clue how to fix
   @Test
   public void testGeoPolygonRect() {
     new AbstractRectIntersectionTestHelper(ctx) {
@@ -189,7 +189,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
   /** Tests consistency of GeoArea.getRelationship vs GeoShape.isWithin */
   public void testGeo3DRelations() throws Exception {

-    int numDocs = atLeast(1000);
+    int numDocs = atLeast(200);
     if (VERBOSE) {
       System.out.println("TEST: " + numDocs + " docs");
     }
@@ -207,8 +207,6 @@ public class TestGeo3DPoint extends LuceneTestCase {
     int iters = atLeast(10);

     int recurseDepth = RandomNumbers.randomIntBetween(random(), 5, 15);

-    iters = atLeast(50);
-
     for(int iter=0;iter<iters;iter++) {
       GeoShape shape = randomShape();
@@ -472,7 +470,7 @@ public class TestGeo3DPoint extends LuceneTestCase {
   }

   public void testRandomMedium() throws Exception {
-    doTestRandom(10000);
+    doTestRandom(1000);
   }

   @Nightly
@@ -39,7 +39,7 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble;
 public class RandomGeo3dShapeGenerator extends LuceneTestCase {

   /* Max num of iterations to find right shape under given constrains */
-  final private static int MAX_SHAPE_ITERATIONS = 50;
+  final private static int MAX_SHAPE_ITERATIONS = 20;
   /* Max num of iterations to find right point under given constrains */
   final private static int MAX_POINT_ITERATIONS = 1000;

@@ -20,7 +20,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;

-import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import com.carrotsearch.randomizedtesting.generators.BiasedNumbers;
 import org.junit.Test;

@@ -30,7 +29,6 @@ import org.junit.Test;
 public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {

   @Test
-  @Repeat(iterations = 10)
   public void testRandomLUCENE8157() {
     final PlanetModel planetModel = randomPlanetModel();
     final GeoPoint startPoint = randomGeoPoint(planetModel);
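This hunk and the remaining RandomGeoPolygonTest / RandomGeoShapeRelationshipTest changes are all the same fix: deleting @Repeat(iterations = n), the randomizedtesting annotation that reruns a single method n times under fresh seeds, along with its now-unused import. Each method drops from n passes to one; the repetition can still be requested on demand (for example with -Dtests.iters=n) instead of being paid on every build. For reference, this is how the removed annotation behaves (a hypothetical test, not one from this diff):

    import com.carrotsearch.randomizedtesting.RandomizedRunner;
    import com.carrotsearch.randomizedtesting.annotations.Repeat;
    import org.junit.Test;
    import org.junit.runner.RunWith;

    @RunWith(RandomizedRunner.class)
    public class RepeatSketch {
      // RandomizedRunner executes this method 10 times, each pass with a fresh derived seed.
      @Test
      @Repeat(iterations = 10)
      public void testSomethingRandomized() {
        // randomized assertions would go here
      }
    }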
@@ -92,7 +90,6 @@ public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {
    * biased doubles.
    */
   @Test
-  @Repeat(iterations = 10)
   //@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-8281")
   public void testCompareBigPolygons() {
     testComparePolygons(Math.PI);
@@ -103,7 +100,6 @@ public class RandomGeoPolygonTest extends RandomGeo3dShapeGenerator {
    * biased doubles.
    */
   @Test
-  @Repeat(iterations = 10)
   //@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/LUCENE-8281")
   public void testCompareSmallPolygons() {
     testComparePolygons(1e-4 * Math.PI);
@@ -17,7 +17,6 @@

 package org.apache.lucene.spatial3d.geom;

-import com.carrotsearch.randomizedtesting.annotations.Repeat;
 import org.junit.Test;

 /**
@@ -32,7 +31,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    *
    */
   @Test
-  @Repeat(iterations = 5)
   public void testRandomPointWithin() {
     int referenceShapeType = CONVEX_POLYGON;
     PlanetModel planetModel = randomPlanetModel();
@@ -65,7 +63,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    * the original shape.
    *
    */
-  @Repeat(iterations = 5)
   public void testRandomPointNotWithin() {
     int referenceShapeType = CONVEX_POLYGON;
     PlanetModel planetModel = randomPlanetModel();
@@ -97,7 +94,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    * Note that both shapes cannot be concave.
    */
   @Test
-  @Repeat(iterations = 5)
   public void testRandomDisjoint() {
     int referenceShapeType = CONVEX_SIMPLE_POLYGON;
     PlanetModel planetModel = randomPlanetModel();
@@ -136,7 +132,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    * Note that if the geoAreaShape is not concave the other shape must be not concave.
    */
   @Test
-  @Repeat(iterations = 5)
   public void testRandomWithIn() {
     PlanetModel planetModel = randomPlanetModel();
     int geoAreaShapeType = randomGeoAreaShapeType();
@@ -187,7 +182,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    *
    */
   @Test
-  @Repeat(iterations = 1)
   public void testRandomContains() {
     int referenceShapeType = CONVEX_SIMPLE_POLYGON;
     PlanetModel planetModel = randomPlanetModel();
@@ -236,7 +230,6 @@ public class RandomGeoShapeRelationshipTest extends RandomGeo3dShapeGenerator {
    * the geoAreaShape.
    */
   @Test
-  @Repeat(iterations = 5)
   public void testRandomOverlaps() {
     PlanetModel planetModel = randomPlanetModel();
     int geoAreaShapeType = randomGeoAreaShapeType();
@@ -549,7 +549,7 @@ public abstract class BaseTokenStreamTestCase extends LuceneTestCase {
     Directory dir = null;
     RandomIndexWriter iw = null;
     final String postingsFormat = TestUtil.getPostingsFormat("dummy");
-    boolean codecOk = iterations * maxWordLength < 100000 || !(postingsFormat.equals("SimpleText"));
+    boolean codecOk = iterations * maxWordLength < 100000 && !(postingsFormat.equals("SimpleText"));
     if (rarely(random) && codecOk) {
       dir = newFSDirectory(createTempDir("bttc"));
       iw = new RandomIndexWriter(new Random(seed), dir, a);
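The || to && flip above is worth spelling out. Under the old guard the only excluded combination was a large workload on the deliberately slow SimpleText postings format; a large workload on any other format could still take the slow FSDirectory branch. Under the new guard that branch requires both a small workload and a non-SimpleText format. Side by side (the wrapper class and format name are illustrative, the expressions mirror the diff):

    final class CodecOkSketch {
      static boolean codecOkOld(int work, String fmt) {
        return work < 100000 || !fmt.equals("SimpleText"); // only big+SimpleText was excluded
      }

      static boolean codecOkNew(int work, String fmt) {
        return work < 100000 && !fmt.equals("SimpleText"); // small AND not SimpleText required
      }
      // codecOkOld(1000000, "SomeDefaultCodec") == true  -> big inputs could hit the slow path
      // codecOkNew(1000000, "SomeDefaultCodec") == false -> big inputs now always skip it
    }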
@@ -388,7 +388,8 @@ public abstract class BaseMergePolicyTestCase extends LuceneTestCase {
    * Simulate an update use-case where documents are uniformly updated across segments.
    */
   public void testSimulateUpdates() throws IOException {
-    doTestSimulateUpdates(mergePolicy(), 10_000_000, 2500);
+    int numDocs = atLeast(1_000_000);
+    doTestSimulateUpdates(mergePolicy(), numDocs, 2500);
   }

   /**
@@ -508,7 +508,7 @@ public abstract class BasePointsFormatTestCase extends BaseIndexFileFormatTestCa
   }

   public void testRandomBinaryMedium() throws Exception {
-    doTestRandomBinary(10000);
+    doTestRandomBinary(1000);
   }

   @Nightly
@@ -586,6 +586,7 @@ public abstract class BaseTermVectorsFormatTestCase extends BaseIndexFileFormatT
     }
   }

+  @Slow
   public void testLotsOfFields() throws IOException {
     final RandomDocumentFactory docFactory = new RandomDocumentFactory(5000, 10);
     for (Options options : validOptions()) {
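@Slow is the gentler sibling of @Nightly: as used in LuceneTestCase the group appears to be enabled by default, so testLotsOfFields keeps running everywhere, but developers can opt out (typically via -Dtests.slow=false) and the cost is at least documented at the declaration site. A sketch of the usage, with an illustrative body:

    import org.apache.lucene.util.LuceneTestCase;

    public class SlowSketch extends LuceneTestCase {
      @Slow // stays in the default suite, but can be excluded via the slow test group
      public void testLotsOfWork() {
        int rounds = atLeast(1000); // still scaled by the usual multipliers
        for (int i = 0; i < rounds; i++) {
          assertTrue(i >= 0); // heavyweight checks would go here
        }
      }
    }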
@@ -251,7 +251,7 @@ public abstract class BaseSimilarityTestCase extends LuceneTestCase {
    */
   public void testRandomScoring() throws Exception {
     Random random = random();
-    final int iterations = atLeast(3);
+    final int iterations = atLeast(1);
     for (int i = 0; i < iterations; i++) {
       // pull a new similarity to switch up parameters
       Similarity similarity = getSimilarity(random);
@@ -79,7 +79,15 @@ public abstract class BaseDocIdSetTestCase<T extends DocIdSet> extends LuceneTes
     copy = copyOf(set, numBits); // then random index
     assertEquals(numBits, set, copy);
     // test regular increments
+    int maxIterations = TEST_NIGHTLY ? Integer.MAX_VALUE : 10;
+    int iterations = 0;
     for (int inc = 2; inc < 1000; inc += TestUtil.nextInt(random(), 1, 100)) {
+      // don't let this test run too many times, even if it gets unlucky with "inc"
+      if (iterations >= maxIterations) {
+        break;
+      }
+      iterations++;
+
       set = new BitSet(numBits);
       for (int d = random().nextInt(10); d < numBits; d += inc) {
         set.set(d);
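The final hunk caps a loop whose trip count is luck-dependent: inc is advanced by a random step, so an unlucky run of small steps used to mean many full scans of the bit set. Counting passes and breaking at a TEST_NIGHTLY-dependent cap bounds the local worst case while leaving nightly runs effectively uncapped (Integer.MAX_VALUE). The same cap, isolated with stand-in names:

    import java.util.Random;

    // Bound a randomly-sized loop locally; leave it effectively unbounded at night.
    final class IterationCapSketch {
      static int countPasses(Random random, boolean nightly) {
        int maxIterations = nightly ? Integer.MAX_VALUE : 10;
        int iterations = 0;
        for (int inc = 2; inc < 1000; inc += 1 + random.nextInt(100)) {
          if (iterations >= maxIterations) {
            break; // an unlucky small "inc" can no longer blow up local runtime
          }
          iterations++;
          // ... the real test scans a bit set with stride "inc" here ...
        }
        return iterations;
      }
    }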