Mirror of https://github.com/apache/lucene.git

commit 7d07d206b5
parent 9466f678f3

LUCENE-3020: better payload testing with mockanalyzer

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1091132 13f79535-47bb-0310-9956-ffa450edef68
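Every hunk in this commit makes the same mechanical change: MockAnalyzer (and the helpers built on it) now takes the test's java.util.Random as its first constructor argument, so any randomized behavior it layers onto a token stream, notably the payloads this issue is about, derives from the test seed and can be replayed. A minimal before/after sketch of the call-site pattern (illustrative, not a line from any one file below):

    // before: the analyzer had no seed source of its own
    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);

    // after: the LuceneTestCase-provided random drives the analyzer,
    // so a failing run is reproducible from its seed
    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);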
@@ -58,7 +58,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox jumped";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -102,7 +102,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox jumped";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -172,7 +172,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -215,7 +215,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, TEXT, Store.YES, Index.ANALYZED,
@@ -256,7 +256,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = newDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
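All five HighlighterPhraseTest hunks above touch the analyzer inside the same writer-setup idiom. For orientation, a condensed sketch of that setup as it reads after the patch; the try/finally shape and the close() call are assumed from the visible context, not quoted from the file:

    final Directory directory = newDirectory();
    final IndexWriter indexWriter = new IndexWriter(directory,
        newIndexWriterConfig(TEST_VERSION_CURRENT,
            new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
    try {
      final Document document = new Document();
      // the field's tokens come from a hand-built TokenStream
      // (TokenStreamConcurrent / TokenStreamSparse in the hunks above)
    } finally {
      indexWriter.close(); // assumed cleanup; the hunks end before it
    }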
@@ -90,7 +90,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
   Directory ramDir;
   public IndexSearcher searcher = null;
   int numHighlights = 0;
-  final Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+  final Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
   TopDocs hits;

   String[] texts = {
@@ -101,7 +101,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
       "wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };

   public void testQueryScorerHits() throws Exception {
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
     query = qp.parse("\"very long\"");
     searcher = new IndexSearcher(ramDir, true);
@@ -133,7 +133,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {

     String s1 = "I call our world Flatland, not because we call it so,";

-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));

     // Verify that a query against the default field results in text being
     // highlighted
@@ -165,7 +165,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
    */
   private static String highlightField(Query query, String fieldName, String text)
       throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
+    TokenStream tokenStream = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true).tokenStream(fieldName, new StringReader(text));
     // Assuming "<B>", "</B>" used to highlight
     SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
     QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -210,7 +210,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
     String f2c = f2 + ":";
     String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
         + " OR " + f2c + ph2 + ")";
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
     Query query = qp.parse(q);

@@ -1134,13 +1134,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
       sb.append("stoppedtoken");
     }
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
+    Highlighter hg = getHighlighter(query, "data", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream(
         "data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
     // new
     // QueryTermScorer(query));
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(100);
-    match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+    match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
     assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
         .getMaxDocCharsToAnalyze());

@@ -1151,7 +1151,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
     // + whitespace)
     sb.append(" ");
     sb.append(goodWord);
-    match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
+    match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "data", sb.toString());
     assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
         .getMaxDocCharsToAnalyze());
   }
@@ -1170,10 +1170,10 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {

     String text = "this is a text with searchterm in it";
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
+    Highlighter hg = getHighlighter(query, "text", new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true).tokenStream("text", new StringReader(text)), fm);
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(36);
-    String match = hg.getBestFragment(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
+    String match = hg.getBestFragment(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopWords, true), "text", text);
     assertTrue(
         "Matched text should contain remainder of text after highlighted query ",
         match.endsWith("in it"));
@@ -1191,7 +1191,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
     // test to show how rewritten query can still be used
     if (searcher != null) searcher.close();
     searcher = new IndexSearcher(ramDir, true);
-    Analyzer analyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);

     QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
     Query query = parser.parse("JF? or Kenned*");
@@ -1446,64 +1446,64 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
     Highlighter highlighter;
     String result;

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-Speed10 <B>foo</B>", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-Speed<B>10</B> foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi</B>-Speed10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("Hi-<B>Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
     highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

     // ///////////////// same tests, just put the bigger overlapping token
     // first
-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("foo");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("foo");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-Speed10 <B>foo</B>", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("10");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("10");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-Speed<B>10</B> foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi</B>-Speed10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("speed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("Hi-<B>Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hispeed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hispeed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);

-    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("hi speed");
+    query = new QueryParser(TEST_VERSION_CURRENT, "text", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("hi speed");
     highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
     result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
     assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1514,7 +1514,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
   }

   private Directory dir;
-  private Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+  private Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

   public void testWeightedTermsWithDeletes() throws IOException, ParseException, InvalidTokenOffsetsException {
     makeIndex();
@@ -1529,7 +1529,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
   }

   private void makeIndex() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
     writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1539,7 +1539,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
   }

   private void deleteDocument() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setOpenMode(OpenMode.APPEND));
     writer.deleteDocuments( new Term( "t_text1", "del" ) );
     // To see negative idf, keep comment the following line
     //writer.optimize();
@@ -1644,7 +1644,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
     dir = newDirectory();
     ramDir = newDirectory();
     IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
     for (String text : texts) {
       addDoc(writer, text);
     }
@@ -87,9 +87,9 @@ public abstract class AbstractTestCase extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    analyzerW = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    analyzerW = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     analyzerB = new BigramAnalyzer();
-    analyzerK = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+    analyzerK = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
     paW = new QueryParser(TEST_VERSION_CURRENT, F, analyzerW );
     paB = new QueryParser(TEST_VERSION_CURRENT, F, analyzerB );
     dir = newDirectory();
@@ -59,7 +59,7 @@ public class TestEmptyIndex extends LuceneTestCase {

     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     testNorms(r);
     r.close();
@@ -84,7 +84,7 @@ public class TestEmptyIndex extends LuceneTestCase {

     // make sure a Directory acts the same
     Directory d = newDirectory();
-    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).close();
+    new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
     r = IndexReader.open(d, false);
     termsEnumTest(r);
     r.close();
@@ -21,6 +21,7 @@ import java.util.Arrays;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Random;

 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
@@ -65,7 +66,7 @@ public class TestIndicesEquals extends LuceneTestCase {

     // create dir data
     IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     for (int i = 0; i < 20; i++) {
       Document document = new Document();
@@ -88,10 +89,13 @@ public class TestIndicesEquals extends LuceneTestCase {

     Directory dir = newDirectory();
     InstantiatedIndex ii = new InstantiatedIndex();

+    // we need to pass the "same" random to both, so they surely index the same payload data.
+    long seed = random.nextLong();
+
     // create dir data
     IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))).setMergePolicy(newLogMergePolicy()));
     indexWriter.setInfoStream(VERBOSE ? System.out : null);
     if (VERBOSE) {
       System.out.println("TEST: make test index");
@@ -104,7 +108,7 @@ public class TestIndicesEquals extends LuceneTestCase {
     indexWriter.close();

     // test ii writer
-    InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(), true);
+    InstantiatedIndexWriter instantiatedIndexWriter = ii.indexWriterFactory(new MockAnalyzer(new Random(seed)), true);
     for (int i = 0; i < 500; i++) {
       Document document = new Document();
       assembleDocument(document, i);
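TestIndicesEquals is the one spot where handing both writers the shared `random` would not do: the test compares a directory-backed index against an InstantiatedIndex, so both writers must produce byte-identical payloads. The commit therefore draws one seed and gives each writer a fresh Random built from it. A sketch of the idiom, using the names visible in the hunks above:

    // one seed, drawn once from the test's master Random ...
    long seed = random.nextLong();

    // ... then two Randoms that replay the identical sequence,
    // so both writers index the same payload bytes
    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))));
    InstantiatedIndexWriter iiWriter =
        ii.indexWriterFactory(new MockAnalyzer(new Random(seed)), true);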
@@ -34,17 +34,17 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase {

   public void test() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocument(iw, "Hello, world!");
     addDocument(iw, "All work and no play makes jack a dull boy");
     iw.close();

-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     addDocument(iw, "Hello, tellus!");
     addDocument(iw, "All work and no play makes danny a dull boy");
     iw.close();

-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     addDocument(iw, "Hello, earth!");
     addDocument(iw, "All work and no play makes wendy a dull girl");
     iw.close();
@@ -143,9 +143,9 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
    */
   private Analyzer randomAnalyzer() {
     switch(random.nextInt(3)) {
-      case 0: return new MockAnalyzer(MockTokenizer.SIMPLE, true);
-      case 1: return new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
-      default: return new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+      case 0: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
+      case 1: return new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+      default: return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     }
   }

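Note that randomAnalyzer() now uses the one Random twice: nextInt(3) picks the analyzer flavor, and the same instance then seeds the chosen MockAnalyzer. The payoff is replayability. A hedged sketch of what that buys; the seed value and the reseeding behavior described are assumptions about the test framework of the period, not part of this commit:

    // hypothetical replay of a failed run: reseeding `random` with the
    // value reported by the failing run re-creates both the switch branch
    // and the analyzer-internal randomness
    Random random = new Random(0x1091132L); // hypothetical seed, not from the commit
    Analyzer a = randomAnalyzer();          // now fully determined by that seed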
@@ -61,7 +61,7 @@ public class TestFieldNormModifier extends LuceneTestCase {
     super.setUp();
     store = newDirectory();
     IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();
@@ -39,7 +39,7 @@ public class TestIndexSplitter extends LuceneTestCase {
     mergePolicy.setNoCFSRatio(1);
     IndexWriter iw = new IndexWriter(
         fsDir,
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMergePolicy(mergePolicy)
     );
@@ -32,7 +32,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Document doc;
     for (int i = 0; i < NUM_DOCS; i++) {
       doc = new Document();
@@ -25,7 +25,7 @@ public class TestTermVectorAccessor extends LuceneTestCase {

   public void test() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     Document doc;

@@ -134,7 +134,7 @@ public class TestAppendingCodec extends LuceneTestCase {

   public void testCodec() throws Exception {
     Directory dir = new AppendingRAMDirectory(random, new RAMDirectory());
-    IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer());
+    IndexWriterConfig cfg = new IndexWriterConfig(Version.LUCENE_40, new MockAnalyzer(random));

     cfg.setCodecProvider(new AppendingCodecProvider());
     ((TieredMergePolicy)cfg.getMergePolicy()).setUseCompoundFile(false);
@@ -40,7 +40,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
   public static void setUpClass() throws Exception {
     dir = newDirectory();
     writer = new IndexWriter(dir, newIndexWriterConfig(random,
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
         .setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
     indexDocs(writer);
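The TestHighFreqTerms hunk composes two consumers of the same seed source: the newIndexWriterConfig(random, ...) overload visible above already randomizes writer settings, and the patched analyzer now draws its randomness from the same place. Condensed from the hunk, with no new names introduced:

    writer = new IndexWriter(dir, newIndexWriterConfig(random,
        TEST_VERSION_CURRENT,
        new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
        .setMaxBufferedDocs(2));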
@@ -66,7 +66,7 @@ public class TestLengthNormModifier extends LuceneTestCase {
     super.setUp();
     store = newDirectory();
     IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();
@@ -39,7 +39,7 @@ public class BooleanFilterTest extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

     //Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
     addDoc(writer, "admin guest", "010", "20040101","Y");
@@ -43,7 +43,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     //Add series of docs with filterable fields : url, text and dates flags
     addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
@@ -34,13 +34,13 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
   private Directory directory;
   private IndexSearcher searcher;
   private IndexReader reader;
-  private Analyzer analyzer=new MockAnalyzer();
+  private Analyzer analyzer=new MockAnalyzer(random);

   @Override
   public void setUp() throws Exception {
     super.setUp();
     directory = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

     //Add series of docs with misspelt names
     addDoc(writer, "jonathon smythe","1");
@@ -121,7 +121,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
   }

   public void testFuzzyLikeThisQueryEquals() {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     FuzzyLikeThisQuery fltq1 = new FuzzyLikeThisQuery(10, analyzer);
     fltq1.addTerms("javi", "subject", 0.5f, 2);
     FuzzyLikeThisQuery fltq2 = new FuzzyLikeThisQuery(10, analyzer);
@@ -56,7 +56,7 @@ public class TestSpanRegexQuery extends LuceneTestCase {
   public void testSpanRegex() throws Exception {
     Directory directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     // doc.add(newField("field", "the quick brown fox jumps over the lazy dog",
     // Field.Store.NO, Field.Index.ANALYZED));
@@ -97,14 +97,14 @@ public class TestSpanRegexQuery extends LuceneTestCase {

     // creating first index writer
     IndexWriter writerA = new IndexWriter(indexStoreA, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerA.addDocument(lDoc);
     writerA.optimize();
     writerA.close();

     // creating second index writer
     IndexWriter writerB = new IndexWriter(indexStoreB, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     writerB.addDocument(lDoc2);
     writerB.optimize();
     writerB.close();
@@ -74,7 +74,7 @@ public class TestMoreLikeThis extends LuceneTestCase {
     Map<String,Float> originalValues = getOriginalValues();

     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);
@@ -109,7 +109,7 @@ public class TestMoreLikeThis extends LuceneTestCase {
   private Map<String,Float> getOriginalValues() throws IOException {
     Map<String,Float> originalValues = new HashMap<String,Float>();
     MoreLikeThis mlt = new MoreLikeThis(reader);
-    mlt.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    mlt.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     mlt.setMinDocFreq(1);
     mlt.setMinTermFreq(1);
     mlt.setMinWordLen(1);
@@ -34,7 +34,7 @@ import org.apache.lucene.util.LuceneTestCase;

 public class TestComplexPhraseQuery extends LuceneTestCase {
   Directory rd;
-  Analyzer analyzer = new MockAnalyzer();
+  Analyzer analyzer = new MockAnalyzer(random);

   DocData docsContent[] = { new DocData("john smith", "1"),
       new DocData("johathon smith", "2"),
@@ -43,7 +43,7 @@ public class TestExtendableQueryParser extends TestQueryParser {
   public QueryParser getParser(Analyzer a, Extensions extensions)
       throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     QueryParser qp = extensions == null ? new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a) : new ExtendableQueryParser(
         TEST_VERSION_CURRENT, "field", a, extensions);
@@ -125,7 +125,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {

   public PrecedenceQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.OR);
@@ -171,7 +171,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {

   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -232,7 +232,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
         "+(title:dog title:cat) -author:\"bob dole\"");

     PrecedenceQueryParser qp = new PrecedenceQueryParser();
-    qp.setAnalyzer(new MockAnalyzer());
+    qp.setAnalyzer(new MockAnalyzer(random));
     // make sure OR is the default:
     assertEquals(Operator.OR, qp.getDefaultOperator());
     qp.setDefaultOperator(Operator.AND);
@@ -246,7 +246,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
   }

   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -266,7 +266,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");

-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -405,7 +405,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer());
+    PrecedenceQueryParser qp = new PrecedenceQueryParser(new MockAnalyzer(random));

     Map<CharSequence, DateTools.Resolution> fieldMap = new HashMap<CharSequence,DateTools.Resolution>();
     // set a field specific date resolution
@@ -467,7 +467,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
   }

   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

     assertQueryEquals("a\\-b:c", a, "a-b:c");
     assertQueryEquals("a\\+b:c", a, "a+b:c");
@@ -533,7 +533,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {

   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);

     PrecedenceQueryParser qp = new PrecedenceQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);
@@ -548,7 +548,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
     q = qp.parse("\"on\"^1.0", "field");
     assertNotNull(q);

-    q = getParser(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
+    q = getParser(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)).parse("the^3",
         "field");
     assertNotNull(q);
   }
@@ -564,7 +564,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
   public void testBooleanQuery() throws Exception {
     BooleanQuery.setMaxClauseCount(2);
     try {
-      getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
+      getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
     } catch (QueryNodeException expected) {
       // too many boolean clauses, so ParseException is expected
@@ -573,7 +573,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {

   // LUCENE-792
   public void testNOT() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("NOT foo AND bar", a, "-foo +bar");
   }

@@ -582,7 +582,7 @@ public class TestPrecedenceQueryParser extends LuceneTestCase {
    * issue has been corrected.
    */
   public void testPrecedence() throws Exception {
-    PrecedenceQueryParser parser = getParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    PrecedenceQueryParser parser = getParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query query1 = parser.parse("A AND B OR C AND D", "field");
     Query query2 = parser.parse("(A AND B) OR (C AND D)", "field");
     assertEquals(query1, query2);
@@ -80,7 +80,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
     String[] fields = { "b", "t" };
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));

     Query q = mfqp.parse("one", null);
     assertEquals("b:one t:one", q.toString());
@@ -150,7 +150,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
     StandardQueryParser mfqp = new StandardQueryParser();
     mfqp.setMultiFields(fields);
     mfqp.setFieldsBoost(boosts);
-    mfqp.setAnalyzer(new MockAnalyzer());
+    mfqp.setAnalyzer(new MockAnalyzer(random));

     // Check for simple
     Query q = mfqp.parse("one", null);
@@ -178,24 +178,24 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
   public void testStaticMethod1() throws QueryNodeException {
     String[] fields = { "b", "t" };
     String[] queries = { "one", "two" };
-    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer());
+    Query q = QueryParserUtil.parse(queries, fields, new MockAnalyzer(random));
     assertEquals("b:one t:two", q.toString());

     String[] queries2 = { "+one", "+two" };
-    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries2, fields, new MockAnalyzer(random));
     assertEquals("(+b:one) (+t:two)", q.toString());

     String[] queries3 = { "one", "+two" };
-    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries3, fields, new MockAnalyzer(random));
     assertEquals("b:one (+t:two)", q.toString());

     String[] queries4 = { "one +more", "+two" };
-    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer());
+    q = QueryParserUtil.parse(queries4, fields, new MockAnalyzer(random));
     assertEquals("(b:one +b:more) (+t:two)", q.toString());

     String[] queries5 = { "blah" };
     try {
-      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer());
+      q = QueryParserUtil.parse(queries5, fields, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -219,15 +219,15 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:one", q.toString());

-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());

     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -240,19 +240,19 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
         BooleanClause.Occur.MUST_NOT };
     StandardQueryParser parser = new StandardQueryParser();
     parser.setMultiFields(fields);
-    parser.setAnalyzer(new MockAnalyzer());
+    parser.setAnalyzer(new MockAnalyzer(random));

     Query q = QueryParserUtil.parse("one", fields, flags,
-        new MockAnalyzer());// , fields, flags, new
+        new MockAnalyzer(random));// , fields, flags, new
     // MockAnalyzer());
     assertEquals("+b:one -t:one", q.toString());

-    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer());
+    q = QueryParserUtil.parse("one two", fields, flags, new MockAnalyzer(random));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());

     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
-      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer());
+      q = QueryParserUtil.parse("blah", fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -265,13 +265,13 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+f1:one -f2:two f3:three", q.toString());

     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -284,13 +284,13 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
     BooleanClause.Occur[] flags = { BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT };
     Query q = QueryParserUtil.parse(queries, fields, flags,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     assertEquals("+b:one -t:two", q.toString());

     try {
       BooleanClause.Occur[] flags2 = { BooleanClause.Occur.MUST };
       q = QueryParserUtil
-          .parse(queries, fields, flags2, new MockAnalyzer());
+          .parse(queries, fields, flags2, new MockAnalyzer(random));
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception, array length differs
@@ -316,7 +316,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
   }

   public void testStopWordSearching() throws Exception {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     Directory ramDir = newDirectory();
     IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
@@ -342,7 +342,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
    * Return empty tokens for field "f1".
    */
   private static final class AnalyzerReturningNull extends Analyzer {
-    MockAnalyzer stdAnalyzer = new MockAnalyzer();
+    MockAnalyzer stdAnalyzer = new MockAnalyzer(random);

     public AnalyzerReturningNull() {
     }
@@ -191,7 +191,7 @@ public class TestQPHelper extends LuceneTestCase {

   public StandardQueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);

@@ -281,7 +281,7 @@ public class TestQPHelper extends LuceneTestCase {

   public Query getQueryDOA(String query, Analyzer a) throws Exception {
     if (a == null)
-      a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+      a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(a);
     qp.setDefaultOperator(Operator.AND);
@@ -301,7 +301,7 @@ public class TestQPHelper extends LuceneTestCase {
   }

   public void testConstantScoreAutoRewrite() throws Exception {
-    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    StandardQueryParser qp = new StandardQueryParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     Query q = qp.parse("foo*bar", "field");
     assertTrue(q instanceof WildcardQuery);
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((MultiTermQuery) q).getRewriteMethod());
@@ -410,9 +410,9 @@ public class TestQPHelper extends LuceneTestCase {
   public void testSimple() throws Exception {
     assertQueryEquals("\"term germ\"~2", null, "\"term germ\"~2");
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("türm term term", new MockAnalyzer(MockTokenizer.WHITESPACE, false),
+    assertQueryEquals("türm term term", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false),
         "türm term term");
-    assertQueryEquals("ümlaut", new MockAnalyzer(MockTokenizer.WHITESPACE, false), "ümlaut");
+    assertQueryEquals("ümlaut", new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), "ümlaut");

     // FIXME: change MockAnalyzer to not extend CharTokenizer for this test
     //assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
@@ -470,7 +470,7 @@ public class TestQPHelper extends LuceneTestCase {
   }

   public void testPunct() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -491,7 +491,7 @@ public class TestQPHelper extends LuceneTestCase {
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");

-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -726,7 +726,7 @@ public class TestQPHelper extends LuceneTestCase {
   }

   public void testEscaped() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

     /*
      * assertQueryEquals("\\[brackets", a, "\\[brackets");
@@ -825,7 +825,7 @@ public class TestQPHelper extends LuceneTestCase {
   }

   public void testQueryStringEscaping() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

     assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
     assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
@@ -866,7 +866,7 @@ public class TestQPHelper extends LuceneTestCase {
   @Ignore("contrib queryparser shouldn't escape wildcard terms")
   public void testEscapedWildcard() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

     WildcardQuery q = new WildcardQuery(new Term("field", "foo\\?ba?r"));
     assertEquals(q, qp.parse("foo\\?ba?r", "field"));
@@ -904,7 +904,7 @@ public class TestQPHelper extends LuceneTestCase {

   public void testBoost() throws Exception {
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(BasicAutomata.makeString("on"));
-    Analyzer oneStopAnalyzer = new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true);
+    Analyzer oneStopAnalyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true);
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(oneStopAnalyzer);

@@ -920,7 +920,7 @@ public class TestQPHelper extends LuceneTestCase {
     assertNotNull(q);

     StandardQueryParser qp2 = new StandardQueryParser();
-    qp2.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+    qp2.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));

     q = qp2.parse("the^3", "field");
     // "the" is a stop word so the result is an empty query:
@@ -950,7 +950,7 @@ public class TestQPHelper extends LuceneTestCase {

   public void testCustomQueryParserWildcard() {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("a?t", "contents");
       fail("Wildcard queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -959,7 +959,7 @@ public class TestQPHelper extends LuceneTestCase {

   public void testCustomQueryParserFuzzy() throws Exception {
     try {
-      new QPTestParser(new MockAnalyzer(MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
+      new QPTestParser(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).parse("xunit~", "contents");
       fail("Fuzzy queries should not be allowed");
     } catch (QueryNodeException expected) {
       // expected exception
@@ -970,7 +970,7 @@ public class TestQPHelper extends LuceneTestCase {
     BooleanQuery.setMaxClauseCount(2);
     try {
       StandardQueryParser qp = new StandardQueryParser();
-      qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+      qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

       qp.parse("one two three", "field");
       fail("ParseException expected due to too many boolean clauses");
@@ -984,7 +984,7 @@ public class TestQPHelper extends LuceneTestCase {
    */
   public void testPrecedence() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

     Query query1 = qp.parse("A AND B OR C AND D", "field");
     Query query2 = qp.parse("+A +B +C +D", "field");
@@ -995,7 +995,7 @@ public class TestQPHelper extends LuceneTestCase {
   // Todo: Convert from DateField to DateUtil
   // public void testLocalDateFormat() throws IOException, QueryNodeException {
   //   Directory ramDir = newDirectory();
-  //   IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+  //   IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
   //   addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
   //   addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
   //   iw.close();
@@ -1116,7 +1116,7 @@ public class TestQPHelper extends LuceneTestCase {
   public void testStopwords() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     CharacterRunAutomaton stopSet = new CharacterRunAutomaton(new RegExp("the|foo").toAutomaton());
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.SIMPLE, true, stopSet, true));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.SIMPLE, true, stopSet, true));

     Query result = qp.parse("a:the OR a:foo", "a");
     assertNotNull("result is null and it shouldn't be", result);
@@ -1140,7 +1140,7 @@ public class TestQPHelper extends LuceneTestCase {
   public void testPositionIncrement() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
     qp.setAnalyzer(
-        new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true));

     qp.setEnablePositionIncrements(true);

@@ -1161,7 +1161,7 @@ public class TestQPHelper extends LuceneTestCase {

   public void testMatchAllDocs() throws Exception {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));

     assertEquals(new MatchAllDocsQuery(), qp.parse("*:*", "field"));
     assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)", "field"));
@@ -1173,7 +1173,7 @@ public class TestQPHelper extends LuceneTestCase {
   private void assertHits(int expected, String query, IndexSearcher is)
       throws IOException, QueryNodeException {
     StandardQueryParser qp = new StandardQueryParser();
-    qp.setAnalyzer(new MockAnalyzer(MockTokenizer.WHITESPACE, false));
+    qp.setAnalyzer(new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
     qp.setLocale(Locale.ENGLISH);

     Query q = qp.parse(query, "date");
@@ -41,7 +41,7 @@ public class SingleFieldTestDb {
     fieldName = fName;
     IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
         Version.LUCENE_CURRENT,
-        new MockAnalyzer()));
+        new MockAnalyzer(random)));
     for (int j = 0; j < docs.length; j++) {
       Document d = new Document();
       d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));
@@ -71,7 +71,7 @@ public class TestCartesian extends LuceneTestCase {
     super.setUp();
     directory = newDirectory();

-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     setUpPlotter( 2, 15);

@ -47,7 +47,7 @@ public class TestDistance extends LuceneTestCase {
|
|||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
directory = newDirectory();
|
||||
writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
addData(writer);
|
||||
|
||||
}
|
||||
|
|
|
@ -35,7 +35,7 @@ public class TestDirectSpellChecker extends LuceneTestCase {
|
|||
spellChecker.setMinQueryLength(0);
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
|
||||
new MockAnalyzer(MockTokenizer.SIMPLE, true));
|
||||
new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
|
||||
|
||||
for (int i = 0; i < 20; i++) {
|
||||
Document doc = new Document();
|
||||
|
@ -93,7 +93,7 @@ public class TestDirectSpellChecker extends LuceneTestCase {
|
|||
public void testOptions() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
|
||||
new MockAnalyzer(MockTokenizer.SIMPLE, true));
|
||||
new MockAnalyzer(random, MockTokenizer.SIMPLE, true));
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(newField("text", "foobar", Field.Store.NO, Field.Index.ANALYZED));
|
||||
|
|
|
@ -46,7 +46,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
|
|||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
store = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
|
||||
IndexWriter writer = new IndexWriter(store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
|
||||
|
||||
Document doc;
|
||||
|
||||
|
|
|
@ -54,7 +54,7 @@ public class TestSpellChecker extends LuceneTestCase {
|
|||
//create a user index
|
||||
userindex = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
Document doc = new Document();
|
||||
|
|
|
@ -63,7 +63,7 @@ public class TestWordnet extends LuceneTestCase {
|
|||
|
||||
private void assertExpandsTo(String term, String expected[]) throws IOException {
|
||||
Query expandedQuery = SynExpand.expand(term, searcher, new
|
||||
MockAnalyzer(), "field", 1F);
|
||||
MockAnalyzer(random), "field", 1F);
|
||||
BooleanQuery expectedQuery = new BooleanQuery();
|
||||
for (String t : expected)
|
||||
expectedQuery.add(new TermQuery(new Term("field", t)),
|
||||
|
|
|
@ -49,7 +49,7 @@ public class TestParser extends LuceneTestCase {
|
|||
@BeforeClass
|
||||
public static void beforeClass() throws Exception {
|
||||
// TODO: rewrite test (this needs to set QueryParser.enablePositionIncrements, too, for work with CURRENT):
|
||||
Analyzer analyzer=new MockAnalyzer(MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false);
|
||||
Analyzer analyzer=new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET, false);
|
||||
//initialize the parser
|
||||
builder=new CorePlusExtensionsParser("contents",analyzer);
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@ import org.xml.sax.SAXException;
|
|||
public class TestQueryTemplateManager extends LuceneTestCase {
|
||||
|
||||
CoreParser builder;
|
||||
Analyzer analyzer=new MockAnalyzer();
|
||||
Analyzer analyzer=new MockAnalyzer(random);
|
||||
private IndexSearcher searcher;
|
||||
private Directory dir;
|
||||
|
||||
|
|
|
@ -19,10 +19,10 @@ package org.apache.lucene.analysis;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Random;
|
||||
|
||||
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
|
||||
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
|
||||
import org.apache.lucene.index.Payload;
|
||||
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
|
||||
|
||||
/**
|
||||
|
@@ -33,67 +33,51 @@ public final class MockAnalyzer extends Analyzer {
   private final boolean lowerCase;
   private final CharacterRunAutomaton filter;
   private final boolean enablePositionIncrements;
-  private final boolean payload;
   private int positionIncrementGap;
+  private final Random random;
+  private Map<String,Integer> previousMappings = new HashMap<String,Integer>();
 
-  /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, filter, enablePositionIncrements, true}).
-   */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements) {
-    this(runAutomaton, lowerCase, filter, enablePositionIncrements, true);
-  }
-
   /**
    * Creates a new MockAnalyzer.
    * 
+   * @param random Random for payloads behavior
    * @param runAutomaton DFA describing how tokenization should happen (e.g. [a-zA-Z]+)
    * @param lowerCase true if the tokenizer should lowercase terms
    * @param filter DFA describing how terms should be filtered (set of stopwords, etc)
   * @param enablePositionIncrements true if position increments should reflect filtered terms.
-   * @param payload if payloads should be added containing the positions (for testing)
   */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements, boolean payload) {
+  public MockAnalyzer(Random random, CharacterRunAutomaton runAutomaton, boolean lowerCase, CharacterRunAutomaton filter, boolean enablePositionIncrements) {
+    this.random = random;
     this.runAutomaton = runAutomaton;
     this.lowerCase = lowerCase;
     this.filter = filter;
     this.enablePositionIncrements = enablePositionIncrements;
-    this.payload = payload;
   }
 
   /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, true}).
+   * Calls {@link #MockAnalyzer(Random, CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean)
+   * MockAnalyzer(random, runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false}).
    */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase) {
-    this(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, true);
-  }
-
-  /**
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, payload}).
-   */
-  public MockAnalyzer(CharacterRunAutomaton runAutomaton, boolean lowerCase, boolean payload) {
-    this(runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false, payload);
+  public MockAnalyzer(Random random, CharacterRunAutomaton runAutomaton, boolean lowerCase) {
+    this(random, runAutomaton, lowerCase, MockTokenFilter.EMPTY_STOPSET, false);
   }
 
   /**
    * Create a Whitespace-lowercasing analyzer with no stopwords removal.
    * <p>
-   * Calls {@link #MockAnalyzer(CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean, boolean)
-   * MockAnalyzer(MockTokenizer.WHITESPACE, true, MockTokenFilter.EMPTY_STOPSET, false, true}).
+   * Calls {@link #MockAnalyzer(Random, CharacterRunAutomaton, boolean, CharacterRunAutomaton, boolean)
+   * MockAnalyzer(random, MockTokenizer.WHITESPACE, true, MockTokenFilter.EMPTY_STOPSET, false}).
    */
-  public MockAnalyzer() {
-    this(MockTokenizer.WHITESPACE, true);
+  public MockAnalyzer(Random random) {
+    this(random, MockTokenizer.WHITESPACE, true);
   }
 
   @Override
   public TokenStream tokenStream(String fieldName, Reader reader) {
     MockTokenizer tokenizer = new MockTokenizer(reader, runAutomaton, lowerCase);
     TokenFilter filt = new MockTokenFilter(tokenizer, filter, enablePositionIncrements);
-    if (payload){
-      filt = new SimplePayloadFilter(filt, fieldName);
-    }
+    filt = maybePayload(filt, fieldName);
     return filt;
   }
 
@@ -105,15 +89,19 @@ public final class MockAnalyzer extends Analyzer {
   @Override
   public TokenStream reusableTokenStream(String fieldName, Reader reader)
       throws IOException {
-    SavedStreams saved = (SavedStreams) getPreviousTokenStream();
+    Map<String,SavedStreams> map = (Map) getPreviousTokenStream();
+    if (map == null) {
+      map = new HashMap<String,SavedStreams>();
+      setPreviousTokenStream(map);
+    }
+
+    SavedStreams saved = map.get(fieldName);
     if (saved == null) {
       saved = new SavedStreams();
       saved.tokenizer = new MockTokenizer(reader, runAutomaton, lowerCase);
       saved.filter = new MockTokenFilter(saved.tokenizer, filter, enablePositionIncrements);
-      if (payload){
-        saved.filter = new SimplePayloadFilter(saved.filter, fieldName);
-      }
-      setPreviousTokenStream(saved);
+      saved.filter = maybePayload(saved.filter, fieldName);
+      map.put(fieldName, saved);
       return saved.filter;
     } else {
       saved.tokenizer.reset(reader);
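Payload behavior is now chosen per field (see maybePayload below), so a single cached SavedStreams is no longer correct: two fields analyzed by the same MockAnalyzer instance may carry different payload filters. A rough illustration of the per-field reuse this map enables, under the usual reusable-stream idiom (IOException handling elided):

    // sketch only: each field name gets its own cached tokenizer/filter chain
    MockAnalyzer analyzer = new MockAnalyzer(new java.util.Random(42));
    TokenStream title = analyzer.reusableTokenStream("title", new java.io.StringReader("hello world"));
    TokenStream body  = analyzer.reusableTokenStream("body",  new java.io.StringReader("hello world"));
    // "title" and "body" may be assigned different payload modes, but each
    // field's mode stays stable across calls because its stream is cached by name.
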
@@ -122,6 +110,28 @@ public final class MockAnalyzer extends Analyzer {
     }
   }
 
+  private synchronized TokenFilter maybePayload(TokenFilter stream, String fieldName) {
+    Integer val = previousMappings.get(fieldName);
+    if (val == null) {
+      switch(random.nextInt(3)) {
+        case 0: val = -1; // no payloads
+          break;
+        case 1: val = Integer.MAX_VALUE; // variable length payload
+          break;
+        case 2: val = random.nextInt(12); // fixed length payload
+          break;
+      }
+      previousMappings.put(fieldName, val); // save it so we are consistent for this field
+    }
+
+    if (val == -1)
+      return stream;
+    else if (val == Integer.MAX_VALUE)
+      return new MockVariableLengthPayloadFilter(random, stream);
+    else
+      return new MockFixedLengthPayloadFilter(random, stream, val);
+  }
+
   public void setPositionIncrementGap(int positionIncrementGap){
     this.positionIncrementGap = positionIncrementGap;
   }
@@ -131,35 +141,3 @@ public final class MockAnalyzer extends Analyzer {
     return positionIncrementGap;
   }
 }
-
-final class SimplePayloadFilter extends TokenFilter {
-  String fieldName;
-  int pos;
-  final PayloadAttribute payloadAttr;
-  final CharTermAttribute termAttr;
-
-  public SimplePayloadFilter(TokenStream input, String fieldName) {
-    super(input);
-    this.fieldName = fieldName;
-    pos = 0;
-    payloadAttr = input.addAttribute(PayloadAttribute.class);
-    termAttr = input.addAttribute(CharTermAttribute.class);
-  }
-
-  @Override
-  public boolean incrementToken() throws IOException {
-    if (input.incrementToken()) {
-      payloadAttr.setPayload(new Payload(("pos: " + pos).getBytes()));
-      pos++;
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  @Override
-  public void reset() throws IOException {
-    super.reset();
-    pos = 0;
-  }
-}
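The maybePayload helper above rolls one of three payload modes per field (none, variable length, or a fixed length under 12 bytes) and records the choice in previousMappings, so every stream for a given field behaves the same way for the life of the analyzer. A hedged sketch of observing this from a test, with the consumption idiom assumed from this codebase's other tests:

    MockAnalyzer analyzer = new MockAnalyzer(new java.util.Random(42));
    TokenStream ts = analyzer.tokenStream("field", new java.io.StringReader("the quick brown fox"));
    PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
    while (ts.incrementToken()) {
      Payload p = payloadAtt.getPayload();
      // p is null in the "no payloads" mode, constant-length in the fixed mode,
      // and 0..128 bytes in the variable mode -- always the same mode for "field".
    }
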
@@ -0,0 +1,49 @@
+package org.apache.lucene.analysis;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.index.Payload;
+
+final class MockFixedLengthPayloadFilter extends TokenFilter {
+  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
+  private final Random random;
+  private final byte[] bytes;
+  private final Payload payload;
+
+  MockFixedLengthPayloadFilter(Random random, TokenStream in, int length) {
+    super(in);
+    this.random = random;
+    this.bytes = new byte[length];
+    this.payload = new Payload(bytes);
+  }
+
+  @Override
+  public boolean incrementToken() throws IOException {
+    if (input.incrementToken()) {
+      random.nextBytes(bytes);
+      payloadAtt.setPayload(payload);
+      return true;
+    } else {
+      return false;
+    }
+  }
+}
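MockFixedLengthPayloadFilter attaches one reused Payload backed by a fixed-size byte[] and refills it with fresh random bytes for every token. The class is package-private, so direct wiring like this sketch would have to live in org.apache.lucene.analysis; tests normally reach it only through MockAnalyzer:

    // hypothetical direct use, same package:
    Tokenizer tok = new MockTokenizer(new java.io.StringReader("one two three"),
                                      MockTokenizer.WHITESPACE, false);
    TokenStream ts = new MockFixedLengthPayloadFilter(new java.util.Random(0), tok, 8);
    // every emitted token now carries an 8-byte payload of random bytes
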
@@ -0,0 +1,51 @@
+package org.apache.lucene.analysis;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.index.Payload;
+
+final class MockVariableLengthPayloadFilter extends TokenFilter {
+  private static final int MAXLENGTH = 129;
+
+  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
+  private final Random random;
+  private final byte[] bytes = new byte[MAXLENGTH];
+  private final Payload payload;
+
+  MockVariableLengthPayloadFilter(Random random, TokenStream in) {
+    super(in);
+    this.random = random;
+    this.payload = new Payload(bytes);
+  }
+
+  @Override
+  public boolean incrementToken() throws IOException {
+    if (input.incrementToken()) {
+      random.nextBytes(bytes);
+      payload.setData(bytes, 0, random.nextInt(MAXLENGTH));
+      payloadAtt.setPayload(payload);
+      return true;
+    } else {
+      return false;
+    }
+  }
+}
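The variable-length variant differs only in re-slicing the shared buffer with setData to a random length in [0, 128] per token, so consumers also get exercised against payloads whose length changes token to token. A one-line sketch of swapping it in, with the same package-private caveat as above:

    TokenStream ts = new MockVariableLengthPayloadFilter(new java.util.Random(0), tok);
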
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.UnsupportedEncodingException;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Random;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
@@ -30,6 +31,8 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.SimilarityProvider;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+
 import static org.apache.lucene.util.LuceneTestCase.TEST_VERSION_CURRENT;
 
 class DocHelper {
@@ -218,9 +221,9 @@ class DocHelper {
   * @param doc
   * @throws IOException
   */
-  public static SegmentInfo writeDoc(Directory dir, Document doc) throws IOException
+  public static SegmentInfo writeDoc(Random random, Directory dir, Document doc) throws IOException
   {
-    return writeDoc(dir, new MockAnalyzer(MockTokenizer.WHITESPACE, false), null, doc);
+    return writeDoc(random, dir, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false), null, doc);
   }
 
   /**
@@ -233,8 +236,8 @@ class DocHelper {
   * @param doc
   * @throws IOException
   */
-  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, SimilarityProvider similarity, Document doc) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+  public static SegmentInfo writeDoc(Random random, Directory dir, Analyzer analyzer, SimilarityProvider similarity, Document doc) throws IOException {
+    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig( /* LuceneTestCase.newIndexWriterConfig(random, */
       TEST_VERSION_CURRENT, analyzer).setSimilarityProvider(similarity));
     //writer.setUseCompoundFile(false);
     writer.addDocument(doc);
@@ -68,7 +68,7 @@ public class RandomIndexWriter implements Closeable {
 
   /** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT and MockAnalyzer */
   public RandomIndexWriter(Random r, Directory dir) throws IOException {
-    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer()));
+    this(r, dir, LuceneTestCase.newIndexWriterConfig(r, LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(r)));
   }
 
   /** create a RandomIndexWriter with a random config: Uses TEST_VERSION_CURRENT */
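With this change the convenience constructor's default MockAnalyzer is seeded from the same Random that already drives the rest of the writer's configuration. Typical use in a test stays unchanged, sketched here under the usual LuceneTestCase conventions (newDirectory, newField, and random are inherited helpers):

    Directory dir = newDirectory();
    RandomIndexWriter writer = new RandomIndexWriter(random, dir); // MockAnalyzer(random) under the hood
    Document doc = new Document();
    doc.add(newField("field", "some text", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.close();
    dir.close();
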
@@ -166,7 +166,7 @@ public class QueryUtils {
       throws IOException {
     Directory d = new MockDirectoryWrapper(random, new RAMDirectory());
     IndexWriter w = new IndexWriter(d, new IndexWriterConfig(
-      TEST_VERSION_CURRENT, new MockAnalyzer()));
+      TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < numDeletedDocs; i++) {
       w.addDocument(new Document());
     }

@@ -43,13 +43,13 @@ import org.apache.lucene.util.LuceneTestCase;
 public class TestDemo extends LuceneTestCase {
 
   public void testDemo() throws IOException, ParseException {
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
 
     // Store the index in memory:
     Directory directory = newDirectory();
     // To store an index on disk, use this instead:
     //Directory directory = FSDirectory.open("/tmp/testindex");
-    RandomIndexWriter iwriter = new RandomIndexWriter(random, directory);
+    RandomIndexWriter iwriter = new RandomIndexWriter(random, directory, analyzer);
     iwriter.w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     String longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
@@ -509,7 +509,7 @@ public class TestExternalCodecs extends LuceneTestCase {
     dir.setCheckIndexOnClose(false); // we use a custom codec provider
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, true)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setCodecProvider(provider).
             setMergePolicy(newLogMergePolicy(3))
     );

@@ -90,7 +90,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
     doc.add(idField);
 
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new MyMergeScheduler())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new MyMergeScheduler())
         .setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
         .setMergePolicy(newLogMergePolicy()));
     LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();

@@ -72,7 +72,7 @@ public class TestSearch extends LuceneTestCase {
     private void doTestSearch(Random random, PrintWriter out, boolean useCompoundFile)
           throws Exception {
       Directory directory = newDirectory();
-      Analyzer analyzer = new MockAnalyzer();
+      Analyzer analyzer = new MockAnalyzer(random);
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
       MergePolicy mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {

@@ -78,7 +78,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
 
   private void doTest(Random random, PrintWriter out, boolean useCompoundFiles) throws Exception {
       Directory directory = newDirectory();
-      Analyzer analyzer = new MockAnalyzer();
+      Analyzer analyzer = new MockAnalyzer(random);
       IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
       final MergePolicy mp = conf.getMergePolicy();
       if (mp instanceof LogMergePolicy) {
@@ -29,7 +29,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
 
   /** Test a configuration that behaves a lot like WhitespaceAnalyzer */
   public void testWhitespace() throws Exception {
-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     assertAnalyzesTo(a, "A bc defg hiJklmn opqrstuv wxy z ",
         new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
     assertAnalyzesToReuse(a, "aba cadaba shazam",
@@ -40,7 +40,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
 
   /** Test a configuration that behaves a lot like SimpleAnalyzer */
   public void testSimple() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true);
     assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
         new String[] { "a", "bc", "defg", "hijklmn", "opqrstuv", "wxy", "z" });
     assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@@ -51,7 +51,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
 
   /** Test a configuration that behaves a lot like KeywordAnalyzer */
   public void testKeyword() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.KEYWORD, false);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);
     assertAnalyzesTo(a, "a-bc123 defg+hijklmn567opqrstuv78wxy_z ",
         new String[] { "a-bc123 defg+hijklmn567opqrstuv78wxy_z " });
     assertAnalyzesToReuse(a, "aba4cadaba-Shazam",
@@ -62,13 +62,13 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
 
   /** Test a configuration that behaves a lot like StopAnalyzer */
   public void testStop() throws Exception {
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);
     assertAnalyzesTo(a, "the quick brown a fox",
         new String[] { "quick", "brown", "fox" },
         new int[] { 2, 1, 2 });
 
     // disable positions
-    a = new MockAnalyzer(MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
+    a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, false);
     assertAnalyzesTo(a, "the quick brown a fox",
         new String[] { "quick", "brown", "fox" },
         new int[] { 1, 1, 1 });
@@ -81,7 +81,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
         BasicOperations.complement(
             Automaton.union(
                 Arrays.asList(BasicAutomata.makeString("foo"), BasicAutomata.makeString("bar")))));
-    Analyzer a = new MockAnalyzer(MockTokenizer.SIMPLE, true, keepWords, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, keepWords, true);
     assertAnalyzesTo(a, "quick foo brown bar bar fox foo",
         new String[] { "foo", "bar", "bar", "foo" },
         new int[] { 2, 2, 1, 2 });
@@ -90,7 +90,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
   /** Test a configuration that behaves a lot like LengthFilter */
   public void testLength() throws Exception {
     CharacterRunAutomaton length5 = new CharacterRunAutomaton(new RegExp(".{5,}").toAutomaton());
-    Analyzer a = new MockAnalyzer(MockTokenizer.WHITESPACE, true, length5, true);
+    Analyzer a = new MockAnalyzer(random, MockTokenizer.WHITESPACE, true, length5, true);
     assertAnalyzesTo(a, "ok toolong fine notfine",
         new String[] { "ok", "fine" },
         new int[] { 1, 2 });
@@ -138,7 +138,7 @@ public class Test2BTerms extends LuceneTestCase {
     Directory dir = newFSDirectory(_TestUtil.getTempDir("2BTerms"));
     IndexWriter w = new IndexWriter(
         dir,
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).
             setRAMBufferSizeMB(256.0).
             setMergeScheduler(new ConcurrentMergeScheduler()).

@@ -53,7 +53,7 @@ public class TestAddIndexes extends LuceneTestCase {
     IndexWriter writer = null;
 
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setOpenMode(OpenMode.CREATE));
     writer.setInfoStream(VERBOSE ? System.out : null);
     // add 100 documents
@@ -64,7 +64,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setOpenMode(OpenMode.CREATE).
             setMergePolicy(newLogMergePolicy(false))
     );
@@ -73,14 +73,14 @@ public class TestAddIndexes extends LuceneTestCase {
     assertEquals(40, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     // add 40 documents in compound files
     addDocs2(writer, 50);
     assertEquals(50, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(100, writer.maxDoc());
     writer.addIndexes(aux, aux2);
     assertEquals(190, writer.maxDoc());
@@ -95,14 +95,14 @@ public class TestAddIndexes extends LuceneTestCase {
 
     // now add another set in.
     Directory aux3 = newDirectory();
-    writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     // add 40 documents
     addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged/index is optimized
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(190, writer.maxDoc());
     writer.addIndexes(aux3);
     assertEquals(230, writer.maxDoc());
@@ -116,7 +116,7 @@ public class TestAddIndexes extends LuceneTestCase {
     verifyTermDocs(dir, new Term("content", "bbb"), 50);
 
     // now optimize it.
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -129,11 +129,11 @@ public class TestAddIndexes extends LuceneTestCase {
 
     // now add a single document
     Directory aux4 = newDirectory();
-    writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     addDocs2(writer, 1);
     writer.close();
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     assertEquals(230, writer.maxDoc());
     writer.addIndexes(aux4);
     assertEquals(231, writer.maxDoc());
@@ -156,7 +156,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addIndexes(aux);
 
@@ -194,7 +194,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -232,7 +232,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory aux = newDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -273,7 +273,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
@@ -281,7 +281,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(1000).
            setMergePolicy(newLogMergePolicy(false))
@@ -291,7 +291,7 @@ public class TestAddIndexes extends LuceneTestCase {
     writer.close();
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(1000).
            setMergePolicy(newLogMergePolicy(false))
@@ -299,7 +299,7 @@ public class TestAddIndexes extends LuceneTestCase {
     addDocs(writer, 100);
     writer.close();
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     try {
       // cannot add self
       writer.addIndexes(aux, dir);
@@ -329,7 +329,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(4))
@@ -358,7 +358,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(9).
            setMergePolicy(newLogMergePolicy(4))
@@ -387,7 +387,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(4))
@@ -422,7 +422,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(4).
            setMergePolicy(newLogMergePolicy(4))
@@ -448,7 +448,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     IndexWriter writer = newWriter(
         aux2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(100).
            setMergePolicy(newLogMergePolicy(10))
@@ -475,7 +475,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(6).
            setMergePolicy(newLogMergePolicy(4))
@@ -536,7 +536,7 @@ public class TestAddIndexes extends LuceneTestCase {
   private void setUpDirs(Directory dir, Directory aux) throws IOException {
     IndexWriter writer = null;
 
-    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+    writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
     // add 1000 documents in 1 segment
     addDocs(writer, 1000);
     assertEquals(1000, writer.maxDoc());
@@ -545,7 +545,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(1000).
            setMergePolicy(newLogMergePolicy(false, 10))
@@ -556,7 +556,7 @@ public class TestAddIndexes extends LuceneTestCase {
     writer.close();
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(1000).
            setMergePolicy(newLogMergePolicy(false, 10))
@@ -575,7 +575,7 @@ public class TestAddIndexes extends LuceneTestCase {
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(100);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(5).setMergePolicy(lmp));
 
     Document doc = new Document();
@@ -603,7 +603,7 @@ public class TestAddIndexes extends LuceneTestCase {
     lmp.setUseCompoundFile(false);
     lmp.setMergeFactor(4);
     writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer())
+        new MockAnalyzer(random))
         .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
     writer.addIndexes(dir);
     writer.close();
@@ -636,14 +636,14 @@ public class TestAddIndexes extends LuceneTestCase {
       NUM_COPY = numCopy;
       dir = new MockDirectoryWrapper(random, new RAMDirectory());
       IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setMaxBufferedDocs(2));
       for (int i = 0; i < NUM_INIT_DOCS; i++)
         addDoc(writer);
       writer.close();
 
       dir2 = newDirectory();
-      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer2.setInfoStream(VERBOSE ? System.out : null);
       writer2.commit();
 
@@ -944,7 +944,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = newDirectory();
-      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
       IndexWriter writer = new IndexWriter(dirs[i], conf);
       Document doc = new Document();
       doc.add(new Field("id", "myid", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
@@ -952,7 +952,7 @@ public class TestAddIndexes extends LuceneTestCase {
       writer.close();
     }
 
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     IndexWriter writer = new IndexWriter(dirs[0], conf);
 
     // Now delete the document
@@ -992,7 +992,7 @@ public class TestAddIndexes extends LuceneTestCase {
     IndexWriter writer = null;
 
     writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodecProvider(
        provider));
     // add 100 documents
     addDocs3(writer, 100);
@@ -1003,7 +1003,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         aux,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setCodecProvider(provider).
            setMaxBufferedDocs(10).
@@ -1017,7 +1017,7 @@ public class TestAddIndexes extends LuceneTestCase {
 
     writer = newWriter(
         aux2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setCodecProvider(provider)
     );
@@ -1030,7 +1030,7 @@ public class TestAddIndexes extends LuceneTestCase {
     // test doc count before segments are merged
     writer = newWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.APPEND).
            setCodecProvider(provider)
     );
@@ -1063,7 +1063,7 @@ public class TestAddIndexes extends LuceneTestCase {
     Directory[] dirs = new Directory[2];
     for (int i = 0; i < dirs.length; i++) {
       dirs[i] = new RAMDirectory();
-      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document d = new Document();
       d.add(new Field("c", "v", Store.YES, Index.ANALYZED, TermVector.YES));
       w.addDocument(d);
@@ -1073,7 +1073,7 @@ public class TestAddIndexes extends LuceneTestCase {
     IndexReader[] readers = new IndexReader[] { IndexReader.open(dirs[0]), IndexReader.open(dirs[1]) };
 
     Directory dir = new RAMDirectory();
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
     lmp.setUseCompoundFile(true);
     lmp.setNoCFSRatio(1.0); // Force creation of CFS
@@ -127,7 +127,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
     TimedThread[] threads = new TimedThread[4];
 
     IndexWriterConfig conf = new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(7);
     ((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
     IndexWriter writer = new MockIndexWriter(directory, conf);

@@ -132,7 +132,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
       try {
         writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
         fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
       } catch (IndexFormatTooOldException e) {
         // pass
@@ -174,7 +174,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     Directory dir = newFSDirectory(oldIndxeDir);
 
     IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.setInfoStream(VERBOSE ? System.out : null);
     w.optimize();
     w.close();
@@ -194,7 +194,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     Directory targetDir = newDirectory();
     IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.addIndexes(dir);
     w.close();
 
@@ -215,7 +215,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     Directory targetDir = newDirectory();
     IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.addIndexes(reader);
     w.close();
     reader.close();
@@ -268,7 +268,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
   }
 
   public void searchIndex(File indexDir, String oldName) throws IOException {
-    //QueryParser parser = new QueryParser("contents", new MockAnalyzer());
+    //QueryParser parser = new QueryParser("contents", new MockAnalyzer(random));
     //Query query = parser.parse("handle:1");
 
     Directory dir = newFSDirectory(indexDir);
@@ -340,7 +340,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     Directory dir = newFSDirectory(oldIndexDir);
     // open writer
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.setInfoStream(VERBOSE ? System.out : null);
     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -385,7 +385,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     searcher.close();
 
     // optimize
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -430,7 +430,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     searcher.close();
 
     // optimize
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     writer.optimize();
     writer.close();
 
@@ -451,7 +451,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     _TestUtil.rmDir(indexDir);
     Directory dir = newFSDirectory(indexDir);
 
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     IndexWriter writer = new IndexWriter(dir, conf);
 
@@ -462,7 +462,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     writer.close();
 
     // open fresh writer so we get no prx file in the added segment
-    conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10);
+    conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10);
     ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(doCFS);
     writer = new IndexWriter(dir, conf);
     addNoProxDoc(writer);
@@ -498,7 +498,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
 
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(-1).
            setRAMBufferSizeMB(16.0).
            setMergePolicy(mergePolicy)
@@ -34,7 +34,7 @@ public class TestCheckIndex extends LuceneTestCase {
 
   public void testDeletedDocs() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<19;i++) {

@@ -321,7 +321,7 @@ public class TestCodecs extends LuceneTestCase {
   public void testSepPositionAfterMerge() throws IOException {
     final Directory dir = newDirectory();
     final IndexWriterConfig config = newIndexWriterConfig(Version.LUCENE_31,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     config.setCodecProvider(new MockSepCodecs());
     final IndexWriter writer = new IndexWriter(dir, config);
 

@@ -72,7 +72,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);
 
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
@@ -130,7 +130,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
     // start:
     mp.setMinMergeDocs(1000);
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setMergePolicy(mp));
     writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -169,7 +169,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
   public void testNoExtraFiles() throws IOException {
     MockDirectoryWrapper directory = newDirectory();
     IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setMaxBufferedDocs(2));
     writer.setInfoStream(VERBOSE ? System.out : null);
 
@@ -189,7 +189,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
       // Reopen
       writer = new IndexWriter(directory, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer())
+          TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
       writer.setInfoStream(VERBOSE ? System.out : null);
     }
@@ -207,7 +207,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
     IndexWriter writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(100))
     );
@@ -240,7 +240,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
       // Reopen
       writer = new IndexWriter(
           directory,
-          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
              setOpenMode(OpenMode.APPEND).
             setMergePolicy(newLogMergePolicy(100))
       );
@ -35,7 +35,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
|
|||
public void testSameFieldNumbersAcrossSegments() throws Exception {
|
||||
for (int i = 0; i < 2; i++) {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
|
||||
|
||||
Document d1 = new Document();
|
||||
d1.add(new Field("f1", "first field", Store.YES, Index.ANALYZED, TermVector.NO));
|
||||
|
@ -44,7 +44,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
|
|||
|
||||
if (i == 1) {
|
||||
writer.close();
|
-writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 } else {
 writer.commit();
 }

@@ -72,7 +72,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 assertEquals("f3", fis2.fieldInfo(2).name);
 assertEquals("f4", fis2.fieldInfo(3).name);

-writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.optimize();
 writer.close();

@@ -96,7 +96,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 public void testAddIndexes() throws Exception {
 Directory dir1 = newDirectory();
 Directory dir2 = newDirectory();
-IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));

 Document d1 = new Document();
 d1.add(new Field("f1", "first field", Store.YES, Index.ANALYZED, TermVector.NO));

@@ -104,7 +104,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 writer.addDocument(d1);

 writer.close();
-writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));

 Document d2 = new Document();
 d2.add(new Field("f2", "second field", Store.YES, Index.ANALYZED, TermVector.NO));

@@ -115,7 +115,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

 writer.close();

-writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 writer.addIndexes(dir2);
 writer.close();

@@ -134,7 +134,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 assertEquals("f3", fis2.fieldInfo(2).name);
 assertEquals("f4", fis2.fieldInfo(3).name);

-writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.optimize();
 writer.close();

@@ -159,7 +159,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 Directory dir = newDirectory();
 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 NoMergePolicy.NO_COMPOUND_FILES));
 Document d = new Document();
 d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,

@@ -180,7 +180,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
 : NoMergePolicy.COMPOUND_FILES));
 Document d = new Document();

@@ -205,7 +205,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
 : NoMergePolicy.COMPOUND_FILES));
 Document d = new Document();

@@ -237,7 +237,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
 : NoMergePolicy.COMPOUND_FILES));
 writer.deleteDocuments(new Term("f1", "d1"));

@@ -248,7 +248,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 }

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 new LogByteSizeMergePolicy()));
 writer.optimize();
 assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);

@@ -281,7 +281,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
 }

 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

 for (int i = 0; i < NUM_DOCS; i++) {
 Document d = new Document();
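The hunks above show the change this patch applies throughout: every no-arg new MockAnalyzer() becomes new MockAnalyzer(random), where random is the per-test-seeded Random that LuceneTestCase supplies. A minimal sketch of the idiom, assuming only the LuceneTestCase helpers already visible in these hunks (the class name and field contents below are illustrative, not part of the patch):

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class SeededAnalyzerSketch extends LuceneTestCase {
  public void testSeededAnalyzer() throws Exception {
    Directory dir = newDirectory();
    // was: new MockAnalyzer() -- a fixed analyzer, so payload-bearing
    // token streams were never exercised by these tests
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("f", "some indexed text", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.close();
    dir.close();
  }
}

Because the analyzer draws on the test's Random, behavior such as payload injection varies from seed to seed yet replays exactly under the same seed.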
@@ -36,7 +36,7 @@ public class TestCrash extends LuceneTestCase {
 private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {
 dir.setLockFactory(NoLockFactory.getNoLockFactory());

-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()));
 ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
 if (initialCommit) {
@@ -204,7 +204,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 Directory dir = newDirectory();
 ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
 IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer())
+new MockAnalyzer(random))
 .setIndexDeletionPolicy(policy);
 MergePolicy mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -221,7 +221,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 // past commits
 lastDeleteTime = System.currentTimeMillis();
 conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(
+new MockAnalyzer(random)).setOpenMode(
 OpenMode.APPEND).setIndexDeletionPolicy(policy);
 mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -303,7 +303,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 policy.dir = dir;

 IndexWriterConfig conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
 .setMergeScheduler(new SerialMergeScheduler());
 MergePolicy mp = conf.getMergePolicy();

@@ -324,7 +324,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 }
 if (!isOptimized) {
 conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(
+new MockAnalyzer(random)).setOpenMode(
 OpenMode.APPEND).setIndexDeletionPolicy(policy);
 mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -373,7 +373,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 int preCount = dir.listAll().length;
 writer = new IndexWriter(dir, newIndexWriterConfig(
 TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(
+new MockAnalyzer(random)).setOpenMode(
 OpenMode.APPEND).setIndexDeletionPolicy(policy));
 writer.close();
 int postCount = dir.listAll().length;

@@ -397,7 +397,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

 IndexWriter writer = new IndexWriter(
 dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setIndexDeletionPolicy(policy).
 setMaxBufferedDocs(2).
 setMergePolicy(newLogMergePolicy(10))

@@ -419,7 +419,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 assertTrue(lastCommit != null);

 // Now add 1 doc and optimize
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
 addDoc(writer);
 assertEquals(11, writer.numDocs());
 writer.optimize();

@@ -428,7 +428,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 assertEquals(6, IndexReader.listCommits(dir).size());

 // Now open writer on the commit just before optimize:
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
 assertEquals(10, writer.numDocs());

@@ -441,7 +441,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 assertEquals(11, r.numDocs());
 r.close();

-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
 assertEquals(10, writer.numDocs());
 // Commits the rollback:

@@ -458,7 +458,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 r.close();

 // Reoptimize
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(policy));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(policy));
 writer.optimize();
 writer.close();

@@ -469,7 +469,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

 // Now open writer on the commit just before optimize,
 // but this time keeping only the last commit:
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexCommit(lastCommit));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexCommit(lastCommit));
 assertEquals(10, writer.numDocs());

 // Reader still sees optimized index, because writer

@@ -505,7 +505,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 Directory dir = newDirectory();

 IndexWriterConfig conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
 .setMaxBufferedDocs(10);
 MergePolicy mp = conf.getMergePolicy();

@@ -518,7 +518,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 }
 writer.close();

-conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
 mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -558,7 +558,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

 for(int j=0;j<N+1;j++) {
 IndexWriterConfig conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
 .setMaxBufferedDocs(10);
 MergePolicy mp = conf.getMergePolicy();

@@ -618,7 +618,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

 Directory dir = newDirectory();
 IndexWriterConfig conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy).setMergePolicy(newLogMergePolicy());
 MergePolicy mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -634,7 +634,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 System.out.println("\nTEST: cycle i=" + i);
 }
 conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
 mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -662,7 +662,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 reader.close();
 searcher.close();
 }
-conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
 mp = conf.getMergePolicy();
 if (mp instanceof LogMergePolicy) {

@@ -741,7 +741,7 @@ public class TestDeletionPolicy extends LuceneTestCase {

 Directory dir = newDirectory();
 IndexWriterConfig conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
 .setMaxBufferedDocs(10);
 MergePolicy mp = conf.getMergePolicy();

@@ -756,7 +756,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 for(int i=0;i<N+1;i++) {

 conf = newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
 .setMaxBufferedDocs(10);
 mp = conf.getMergePolicy();

@@ -780,7 +780,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 searcher.close();

 writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
 // This will not commit: there are no changes
 // pending because we opened for "create":
@@ -44,8 +44,8 @@ public class TestDirectoryReader extends LuceneTestCase {
 doc2 = new Document();
 DocHelper.setupDoc(doc1);
 DocHelper.setupDoc(doc2);
-DocHelper.writeDoc(dir, doc1);
-DocHelper.writeDoc(dir, doc2);
+DocHelper.writeDoc(random, dir, doc1);
+DocHelper.writeDoc(random, dir, doc2);
 sis = new SegmentInfos();
 sis.read(dir);
 }

@@ -199,7 +199,7 @@ public class TestDirectoryReader extends LuceneTestCase {
 private void addDoc(Random random, Directory ramDir1, String s, boolean create) throws IOException {
 IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig(
 TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(
+new MockAnalyzer(random)).setOpenMode(
 create ? OpenMode.CREATE : OpenMode.APPEND));
 Document doc = new Document();
 doc.add(newField("body", s, Field.Store.YES, Field.Index.ANALYZED));
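The TestDirectoryReader hunks also thread the test's Random into DocHelper.writeDoc, presumably so the helper can seed the analyzer of its internal writer the same way. Only the new signature is confirmed by the hunk above; the body below is a hypothetical sketch of what such a helper would look like:

// Hypothetical sketch -- the real DocHelper body is not part of this diff.
public static void writeDoc(Random random, Directory dir, Document doc) throws IOException {
  IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
      LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random))); // seeded analyzer
  writer.addDocument(doc);
  writer.commit();
  writer.close();
}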
@@ -114,7 +114,7 @@ public class TestDoc extends LuceneTestCase {
 Directory directory = newFSDirectory(indexDir);
 IndexWriter writer = new IndexWriter(
 directory,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setOpenMode(OpenMode.CREATE).
 setMaxBufferedDocs(-1).
 setMergePolicy(newLogMergePolicy(10))

@@ -148,7 +148,7 @@ public class TestDoc extends LuceneTestCase {
 directory = newFSDirectory(indexDir);
 writer = new IndexWriter(
 directory,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setOpenMode(OpenMode.CREATE).
 setMaxBufferedDocs(-1).
 setMergePolicy(newLogMergePolicy(10))
@@ -60,7 +60,7 @@ public class TestDocTermOrds extends LuceneTestCase {

 public void testSimple() throws Exception {
 Directory dir = newDirectory();
-final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 Document doc = new Document();
 Field field = newField("field", "", Field.Index.ANALYZED);
 doc.add(field);

@@ -228,7 +228,7 @@ public class TestDocTermOrds extends LuceneTestCase {

 final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;

-IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));

 // Sometimes swap in codec that impls ord():
 if (random.nextInt(10) == 7) {

@@ -331,7 +331,7 @@ public class TestDocTermOrds extends LuceneTestCase {

 final int NUM_DOCS = 1000 * RANDOM_MULTIPLIER;

-IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));

 // Sometimes swap in codec that impls ord():
 if (random.nextInt(10) == 7) {
@@ -21,7 +21,6 @@ import java.util.ArrayList;
 import java.util.Arrays;

 import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader.AtomicReaderContext;

@@ -34,13 +33,11 @@ import org.apache.lucene.util.ReaderUtil;

 public class TestDocsAndPositions extends LuceneTestCase {
 private String fieldName;
-private boolean usePayload;

 @Override
 public void setUp() throws Exception {
 super.setUp();
 fieldName = "field" + random.nextInt();
-usePayload = random.nextBoolean();
 }

 /**

@@ -49,8 +46,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 public void testPositionsSimple() throws IOException {
 Directory directory = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random, directory,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-MockTokenizer.WHITESPACE, true, usePayload)));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 for (int i = 0; i < 39; i++) {
 Document doc = new Document();
 doc.add(newField(fieldName, "1 2 3 4 5 6 7 8 9 10 "

@@ -75,7 +71,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 final int advance = docsAndPosEnum.advance(random.nextInt(atomicReaderContext.reader.maxDoc()));
 do {
 String msg = "Advanced to: " + advance + " current doc: "
-+ docsAndPosEnum.docID() + " usePayloads: " + usePayload;
++ docsAndPosEnum.docID(); // TODO: + " usePayloads: " + usePayload;
 assertEquals(msg, 4, docsAndPosEnum.freq());
 assertEquals(msg, 0, docsAndPosEnum.nextPosition());
 assertEquals(msg, 4, docsAndPosEnum.freq());

@@ -115,8 +111,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 public void testRandomPositions() throws IOException {
 Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newLogMergePolicy()));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 int numDocs = 131;
 int max = 1051;
 int term = random.nextInt(max);

@@ -176,8 +171,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
 for (int j = 0; j < howMany; j++) {
 assertEquals("iteration: " + i + " initDoc: " + initDoc + " doc: "
 + docID + " base: " + atomicReaderContext.docBase
-+ " positions: " + Arrays.toString(pos) + " usePayloads: "
-+ usePayload, pos[j].intValue(), docsAndPosEnum.nextPosition());
++ " positions: " + Arrays.toString(pos) /* TODO: + " usePayloads: "
++ usePayload*/, pos[j].intValue(), docsAndPosEnum.nextPosition());
 }

 if (random.nextInt(10) == 0) { // once is a while advance

@@ -196,8 +191,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 public void testRandomDocs() throws IOException {
 Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-MockTokenizer.WHITESPACE, true, usePayload)).setMergePolicy(newLogMergePolicy()));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 int numDocs = 499;
 int max = 15678;
 int term = random.nextInt(max);

@@ -275,8 +269,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 public void testLargeNumberOfPositions() throws IOException {
 Directory dir = newDirectory();
 RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
-MockTokenizer.WHITESPACE, true, usePayload)));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 int howMany = 1000;
 for (int i = 0; i < 39; i++) {
 Document doc = new Document();

@@ -315,8 +308,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
 } else {
 initDoc = docsAndPosEnum.advance(random.nextInt(maxDoc));
 }
-String msg = "Iteration: " + i + " initDoc: " + initDoc + " payloads: "
-+ usePayload;
+String msg = "Iteration: " + i + " initDoc: " + initDoc; // TODO: + " payloads: " + usePayload;
 assertEquals(howMany / 2, docsAndPosEnum.freq());
 for (int j = 0; j < howMany; j += 2) {
 assertEquals("position missmatch index: " + j + " with freq: "
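TestDocsAndPositions is the one file above where the change is more than mechanical: the test previously tracked its own usePayload flag and passed it to the three-argument constructor, as in new MockAnalyzer(MockTokenizer.WHITESPACE, true, usePayload). With this patch the seeded analyzer decides internally whether to inject payloads, so the field, its import, and its uses all go away (the assert messages keep the old text behind TODO comments). The before/after of the writer setup, using only calls shown in the hunks above:

// before: payload behavior controlled by a flag the test had to manage
//   RandomIndexWriter writer = new RandomIndexWriter(random, dir,
//       newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(
//           MockTokenizer.WHITESPACE, true, usePayload)));
// after: the seeded analyzer owns the decision
RandomIndexWriter writer = new RandomIndexWriter(random, dir,
    newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));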
@@ -62,7 +62,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 public void testAddDocument() throws Exception {
 Document testDoc = new Document();
 DocHelper.setupDoc(testDoc);
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.addDocument(testDoc);
 writer.commit();
 SegmentInfo info = writer.newestSegment();

@@ -211,7 +211,7 @@ public class TestDocumentWriter extends LuceneTestCase {

 public void testPreAnalyzedField() throws IOException {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document doc = new Document();

 doc.add(new Field("preanalyzed", new TokenStream() {

@@ -271,7 +271,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 doc.add(newField("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.addDocument(doc);
 writer.close();

@@ -306,7 +306,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 doc.add(newField("f2", "v2", Store.YES, Index.NO));

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.addDocument(doc);
 writer.optimize(); // be sure to have a single segment
 writer.close();
@@ -51,7 +51,7 @@ public class TestFieldsReader extends LuceneTestCase {
 DocHelper.setupDoc(testDoc);
 _TestUtil.add(testDoc, fieldInfos);
 dir = newDirectory();
-IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
 ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
 IndexWriter writer = new IndexWriter(dir, conf);
 writer.addDocument(testDoc);

@@ -291,7 +291,7 @@ public class TestFieldsReader extends LuceneTestCase {
 Directory tmpDir = newFSDirectory(file);
 assertTrue(tmpDir != null);

-IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
+IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setMergePolicy(newLogMergePolicy());
 ((LogMergePolicy) conf.getMergePolicy()).setUseCompoundFile(false);
 IndexWriter writer = new IndexWriter(tmpDir, conf);
 writer.addDocument(testDoc);

@@ -478,7 +478,7 @@ public class TestFieldsReader extends LuceneTestCase {
 try {
 Directory dir = new FaultyFSDirectory(indexDir);
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 for(int i=0;i<2;i++)
 writer.addDocument(testDoc);
 writer.optimize();
@@ -126,7 +126,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
 */
 public void testFilterIndexReader() throws Exception {
 Directory directory = newDirectory();
-IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));

 Document d1 = new Document();
 d1.add(newField("default","one two", Field.Store.YES, Field.Index.ANALYZED));

@@ -143,7 +143,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
 writer.close();

 Directory target = newDirectory();
-writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 IndexReader reader = new TestReader(IndexReader.open(directory, true));
 writer.addIndexes(reader);
 writer.close();
@@ -32,7 +32,7 @@ public class TestFlex extends LuceneTestCase {

 IndexWriter w = new IndexWriter(
 d,
-new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer()).
+new IndexWriterConfig(Version.LUCENE_31, new MockAnalyzer(random)).
 setMaxBufferedDocs(7)
 );

@@ -64,7 +64,7 @@ public class TestFlex extends LuceneTestCase {
 public void testTermOrd() throws Exception {
 Directory d = newDirectory();
 IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
 Document doc = new Document();
 doc.add(newField("f", "a b c", Field.Store.NO, Field.Index.ANALYZED));
 w.addDocument(doc);
@@ -46,7 +46,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 Directory dir = newDirectory();
 {
 IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer());
+new MockAnalyzer(random));
 IndexWriter writer = new IndexWriter(dir, config);
 Document d = new Document();
 d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,

@@ -83,7 +83,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {

 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document d = new Document();
 d.add(new Field("f1", "d3 first field", Store.YES, Index.ANALYZED,
 TermVector.NO));

@@ -102,7 +102,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 }

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.optimize();
 assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
 writer.close();

@@ -117,7 +117,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 Directory dir = newDirectory();
 {
 IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer());
+new MockAnalyzer(random));
 IndexWriter writer = new IndexWriter(dir, config);
 Document d = new Document();
 d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,

@@ -145,7 +145,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 assertFNXFiles(dir, "2.fnx");

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.optimize();
 assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
 writer.close();

@@ -160,7 +160,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 Directory dir = newDirectory();
 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
 new KeepAllDeletionPolicy()));
 Document d = new Document();
 d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,

@@ -185,7 +185,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {

 {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document d = new Document();
 d.add(new Field("f1", "d3 first field", Store.YES, Index.ANALYZED,
 TermVector.NO));

@@ -197,7 +197,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 assertFNXFiles(dir, "2.fnx");
 }
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.optimize();
 assertFalse(" field numbers got mixed up", writer.anyNonBulkMerges);
 writer.close();

@@ -210,7 +210,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 for (int i = 0; i < 39; i++) {
 Directory dir = newDirectory();
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
 new KeepAllDeletionPolicy()));
 Document d = new Document();
 d.add(new Field("f1", "d1 first field", Store.YES, Index.ANALYZED,

@@ -232,7 +232,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 List<IndexCommit> listCommits = IndexReader.listCommits(dir);
 assertEquals(2, listCommits.size());
 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setIndexDeletionPolicy(
+new MockAnalyzer(random)).setIndexDeletionPolicy(
 new KeepAllDeletionPolicy()).setIndexCommit(listCommits.get(0)));

 d = new Document();

@@ -247,7 +247,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 assertFNXFiles(dir, "1.fnx", "2.fnx", "3.fnx");

 writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()));
+new MockAnalyzer(random)));
 writer.commit();
 listCommits = IndexReader.listCommits(dir);
 assertEquals(1, listCommits.size());

@@ -290,9 +290,9 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 }
 Directory base = buildRandomIndex(fieldNames.toArray(new String[0]),
 20 + random.nextInt(100),
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 FieldNumberBiMap globalFieldMap = writer.segmentInfos
 .getOrLoadGlobalFieldNumberMap(base);
 Set<Entry<String, Integer>> entries = globalFieldMap.entries();

@@ -315,7 +315,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {

 Directory base = newDirectory();
 IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document doc = new Document();
 for (String string : fieldNames) {
 doc.add(newField(string,

@@ -339,9 +339,9 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 for (int j = 0; j < numIndexes; j++) {
 Directory toAdd = buildRandomIndex(fieldNames.toArray(new String[0]),
 1 + random.nextInt(50),
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 if (random.nextBoolean()) {
 IndexReader open = IndexReader.open(toAdd);
 w.addIndexes(open);

@@ -357,7 +357,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 toAdd.close();
 }
 IndexWriter w = new IndexWriter(base, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 new LogByteSizeMergePolicy()));
 w.optimize();
 w.close();

@@ -402,7 +402,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 }
 Directory base = newDirectory();
 IndexWriter writer = new IndexWriter(base, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
 NoMergePolicy.NO_COMPOUND_FILES));

 SortedMap<Integer, String> copySortedMap = new TreeMap<Integer, String>(

@@ -428,7 +428,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 writer.close();

 writer = new IndexWriter(base, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
 writer.commit(); // make sure the old index is the latest segment
 writer.close();

@@ -459,7 +459,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 .unzip(getDataFile("index." + oldNames[i] + ".zip"), oldIndxeDir);
 dir = newFSDirectory(oldIndxeDir);
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(policy));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(policy));
 SegmentInfos segmentInfos = writer.segmentInfos;
 assertTrue(DefaultSegmentInfosWriter.FORMAT_4_0 < segmentInfos.getFormat());
 assertEquals(0, segmentInfos.getGlobalFieldMapVersion());
@@ -48,7 +48,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {

 IndexWriter writer = new IndexWriter(
 dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMaxBufferedDocs(10).
 setMergePolicy(mergePolicy)
 );

@@ -152,7 +152,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {

 // Open & close a writer: it should delete the above 4
 // files and nothing more:
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 writer.close();

 String[] files2 = dir.listAll();
@@ -69,7 +69,7 @@ public class TestIndexReader extends LuceneTestCase

 // set up writer
 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setMaxBufferedDocs(2));
 for(int i=0;i<27;i++)
 addDocumentWithFields(writer);

@@ -92,7 +92,7 @@ public class TestIndexReader extends LuceneTestCase

 // Change the index
 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(
+new MockAnalyzer(random)).setOpenMode(
 OpenMode.APPEND).setMaxBufferedDocs(2));
 for(int i=0;i<7;i++)
 addDocumentWithFields(writer);

@@ -104,7 +104,7 @@ public class TestIndexReader extends LuceneTestCase
 r3.close();

 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer())
+new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND));
 writer.optimize();
 writer.close();

@@ -119,7 +119,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testIsCurrent() throws Exception {
 Directory d = newDirectory();
 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 writer.close();
 // set up reader:

@@ -127,13 +127,13 @@ public class TestIndexReader extends LuceneTestCase
 assertTrue(reader.isCurrent());
 // modify index by adding another document:
 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());
 // re-create index:
 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());

@@ -150,7 +150,7 @@ public class TestIndexReader extends LuceneTestCase
 // set up writer
 IndexWriter writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
 );

 Document doc = new Document();

@@ -172,7 +172,7 @@ public class TestIndexReader extends LuceneTestCase
 // add more documents
 writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setOpenMode(OpenMode.APPEND).
 setMergePolicy(newLogMergePolicy())
 );

@@ -271,7 +271,7 @@ public class TestIndexReader extends LuceneTestCase
 // set up writer
 IndexWriter writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMergePolicy(newLogMergePolicy())
 );
 // want to get some more segments here

@@ -330,7 +330,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");

 // add 100 documents with term : aaa
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.setInfoStream(VERBOSE ? System.out : null);
 for (int i = 0; i < 100; i++) {
 addDoc(writer, searchTerm.text());

@@ -371,7 +371,7 @@ public class TestIndexReader extends LuceneTestCase
 Directory dir = newDirectory();
 byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));

 for (int i = 0; i < 10; i++) {
 addDoc(writer, "document number " + (i + 1));

@@ -380,7 +380,7 @@ public class TestIndexReader extends LuceneTestCase
 addDocumentWithTermVectorFields(writer);
 }
 writer.close();
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
 Document doc = new Document();
 doc.add(new Field("bin1", bin));
 doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));

@@ -417,7 +417,7 @@ public class TestIndexReader extends LuceneTestCase
 // force optimize


-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
 writer.optimize();
 writer.close();
 reader = IndexReader.open(dir, false);

@@ -446,7 +446,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");

 // add 11 documents with term : aaa
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 for (int i = 0; i < 11; i++) {
 addDoc(writer, searchTerm.text());
 }

@@ -489,7 +489,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");

 // add 11 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.commit();
 for (int i = 0; i < 11; i++) {
 addDoc(writer, searchTerm.text());

@@ -532,7 +532,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");

 // add 1 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDoc(writer, searchTerm.text());
 writer.close();

@@ -577,7 +577,7 @@ public class TestIndexReader extends LuceneTestCase
 // add 1 documents with term : aaa
 writer = new IndexWriter(
 dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMergePolicy(newLogMergePolicy(false))
 );
 addDoc(writer, searchTerm.text());

@@ -632,7 +632,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm2 = new Term("content", "bbb");

 // add 100 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 for (int i = 0; i < 100; i++) {
 addDoc(writer, searchTerm.text());
 }

@@ -647,7 +647,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTermDocsCount("first reader", reader, searchTerm2, 0);

 // add 100 documents with term : bbb
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 for (int i = 0; i < 100; i++) {
 addDoc(writer, searchTerm2.text());
 }

@@ -708,7 +708,7 @@ public class TestIndexReader extends LuceneTestCase
 // Create initial data set
 File dirFile = _TestUtil.getTempDir("TestIndexReader.testFilesOpenClose");
 Directory dir = newFSDirectory(dirFile);
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDoc(writer, "test");
 writer.close();
 dir.close();

@@ -718,7 +718,7 @@ public class TestIndexReader extends LuceneTestCase
 dir = newFSDirectory(dirFile);

 // Now create the data set again, just as before
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 addDoc(writer, "test");
 writer.close();
 dir.close();

@@ -738,7 +738,7 @@ public class TestIndexReader extends LuceneTestCase
 for(int i=0;i<2;i++) {
 final Directory dir = newDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 addDocumentWithFields(writer);
 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
 writer.close();

@@ -755,7 +755,7 @@ public class TestIndexReader extends LuceneTestCase
 // incremented:
 Thread.sleep(1000);

-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir, false);

@@ -768,7 +768,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testVersion() throws IOException {
 Directory dir = newDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
 writer.close();

@@ -779,7 +779,7 @@ public class TestIndexReader extends LuceneTestCase
 reader.close();
 // modify index and check version has been
 // incremented:
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir, false);

@@ -790,10 +790,10 @@ public class TestIndexReader extends LuceneTestCase

 public void testLock() throws IOException {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 writer.close();
-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
 IndexReader reader = IndexReader.open(dir, false);
 try {
 reader.deleteDocument(0);

@@ -814,7 +814,7 @@ public class TestIndexReader extends LuceneTestCase

 public void testUndeleteAll() throws IOException {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();

@@ -831,7 +831,7 @@ public class TestIndexReader extends LuceneTestCase

 public void testUndeleteAllAfterClose() throws IOException {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();

@@ -847,7 +847,7 @@ public class TestIndexReader extends LuceneTestCase

 public void testUndeleteAllAfterCloseThenReopen() throws IOException {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();

@@ -883,7 +883,7 @@ public class TestIndexReader extends LuceneTestCase

 // First build up a starting index:
 MockDirectoryWrapper startDir = newDirectory();
-IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 if (VERBOSE) {
 System.out.println("TEST: create initial index");
 writer.setInfoStream(System.out);

@@ -1067,7 +1067,7 @@ public class TestIndexReader extends LuceneTestCase

 public void testDocsOutOfOrderJIRA140() throws IOException {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 for(int i=0;i<11;i++) {
 addDoc(writer, "aaa");
 }

@@ -1085,7 +1085,7 @@ public class TestIndexReader extends LuceneTestCase
 }
 reader.close();

-writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));

 // We must add more docs to get a new segment written
 for(int i=0;i<11;i++) {

@@ -1107,7 +1107,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testExceptionReleaseWriteLockJIRA768() throws IOException {

 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDoc(writer, "aaa");
 writer.close();

@@ -1163,7 +1163,7 @@ public class TestIndexReader extends LuceneTestCase

 public void testMultiReaderDeletes() throws Exception {
 Directory dir = newDirectory();
-RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+RandomIndexWriter w= new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 Document doc = new Document();
 doc.add(newField("f", "doctor", Field.Store.NO, Field.Index.NOT_ANALYZED));
 w.addDocument(doc);

@@ -1199,7 +1199,7 @@ public class TestIndexReader extends LuceneTestCase
 // add 100 documents with term : aaa
 // add 100 documents with term : bbb
 // add 100 documents with term : ccc
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
 for (int i = 0; i < 100; i++) {
 addDoc(writer, searchTerm1.text());
 addDoc(writer, searchTerm2.text());

@@ -1421,7 +1421,7 @@ public class TestIndexReader extends LuceneTestCase
 // set up writer
 IndexWriter writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMaxBufferedDocs(2).
 setMergePolicy(newLogMergePolicy(10))
 );

@@ -1441,7 +1441,7 @@ public class TestIndexReader extends LuceneTestCase
 // Change the index
 writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setOpenMode(OpenMode.APPEND).
 setMaxBufferedDocs(2).
 setMergePolicy(newLogMergePolicy(10))

@@ -1456,7 +1456,7 @@ public class TestIndexReader extends LuceneTestCase
 r2.close();

 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer())
+new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND));
 writer.optimize();
 writer.close();

@@ -1472,7 +1472,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testReadOnly() throws Throwable {
 Directory d = newDirectory();
 IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 addDocumentWithFields(writer);
 writer.commit();
 addDocumentWithFields(writer);

@@ -1488,7 +1488,7 @@ public class TestIndexReader extends LuceneTestCase

 writer = new IndexWriter(
 d,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setOpenMode(OpenMode.APPEND).
 setMergePolicy(newLogMergePolicy(10))
 );

@@ -1509,7 +1509,7 @@ public class TestIndexReader extends LuceneTestCase
 }

 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer())
+new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND));
 writer.optimize();
 writer.close();

@@ -1530,7 +1530,7 @@ public class TestIndexReader extends LuceneTestCase

 // Make sure write lock isn't held
 writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
-new MockAnalyzer())
+new MockAnalyzer(random))
 .setOpenMode(OpenMode.APPEND));
 writer.close();

@@ -1543,7 +1543,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testIndexReader() throws Exception {
 Directory dir = newDirectory();
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.addDocument(createDocument("a"));
 writer.addDocument(createDocument("b"));
 writer.addDocument(createDocument("c"));

@@ -1562,7 +1562,7 @@ public class TestIndexReader extends LuceneTestCase
 MockDirectoryWrapper dir = newDirectory();
 dir.setPreventDoubleWrite(false);
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 writer.addDocument(createDocument("a"));
 writer.addDocument(createDocument("b"));
 writer.addDocument(createDocument("c"));

@@ -1604,7 +1604,7 @@ public class TestIndexReader extends LuceneTestCase
 Directory dir = newDirectory();

 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
 .setMaxBufferedDocs(2));
 writer.addDocument(createDocument("a"));
 writer.addDocument(createDocument("a"));

@@ -1628,7 +1628,7 @@ public class TestIndexReader extends LuceneTestCase
 // reuse the doc values arrays in FieldCache
 public void testFieldCacheReuseAfterClone() throws Exception {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document doc = new Document();
 doc.add(newField("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
 writer.addDocument(doc);

@@ -1661,7 +1661,7 @@ public class TestIndexReader extends LuceneTestCase
 Directory dir = newDirectory();
 IndexWriter writer = new IndexWriter(
 dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMergePolicy(newLogMergePolicy(10))
 );
 Document doc = new Document();

@@ -1697,7 +1697,7 @@ public class TestIndexReader extends LuceneTestCase
 Directory dir = newDirectory();
 IndexWriter writer = new IndexWriter(
 dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
 setMaxBufferedDocs(-1).
 setMergePolicy(newLogMergePolicy(10))
 );

@@ -1741,7 +1741,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-1586: getUniqueTermCount
 public void testUniqueTermCount() throws Exception {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
 Document doc = new Document();
 doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
 doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));

@@ -1774,7 +1774,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-1609: don't load terms index
 public void testNoTermsIndex() throws Throwable {
 Directory dir = newDirectory();
-IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
+IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodecProvider(_TestUtil.alwaysCodec("Standard")));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
|
||||
doc.add(newField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
|
||||
|
@ -1793,7 +1793,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
|
||||
writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setCodecProvider(_TestUtil.alwaysCodec("Standard")).
|
||||
setMergePolicy(newLogMergePolicy(10))
|
||||
);
|
||||
|
@ -1821,7 +1821,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
public void testPrepareCommitIsCurrent() throws Throwable {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.commit();
|
||||
Document doc = new Document();
|
||||
writer.addDocument(doc);
|
||||
|
@ -1866,7 +1866,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
// LUCENE-2812
|
||||
public void testIndexExists() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.addDocument(new Document());
|
||||
writer.prepareCommit();
|
||||
assertFalse(IndexReader.indexExists(dir));
|
||||
|
@ -1879,7 +1879,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
// dict cache
|
||||
public void testTotalTermFreqCached() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document d = new Document();
|
||||
d.add(newField("f", "a a b", Field.Index.ANALYZED));
|
||||
writer.addDocument(d);
|
||||
|
@ -1901,7 +1901,7 @@ public class TestIndexReader extends LuceneTestCase
|
|||
// LUCENE-2474
|
||||
public void testReaderFinishedListener() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
|
||||
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
writer.addDocument(new Document());
|
||||
|
|
|
@@ -199,7 +199,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
TestIndexReaderReopen.createIndex(random, dir1, true);
IndexReader reader1 = IndexReader.open(dir1, false);
IndexWriter w = new IndexWriter(dir1, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w.optimize();
w.close();
IndexReader reader2 = reader1.clone(true);
@@ -496,7 +496,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
final Directory dir = newDirectory();
IndexWriter w = new IndexWriter(
dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
setMergePolicy(newLogMergePolicy(false))
);
Document doc = new Document();
@@ -75,7 +75,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
similarityProviderOne = new SimilarityProviderOne();
-anlzr = new MockAnalyzer();
+anlzr = new MockAnalyzer(random);
}

/**
@@ -173,7 +173,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(
+TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(
OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newLogMergePolicy()));
iwriter.commit();
IndexReader reader = IndexReader.open(dir, false);
@@ -700,7 +700,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final Directory dir = newDirectory();
final int n = 30 * RANDOM_MULTIPLIER;
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}
@@ -721,7 +721,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
modifier.close();
} else {
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
-TEST_VERSION_CURRENT, new MockAnalyzer()));
+TEST_VERSION_CURRENT, new MockAnalyzer(random)));
modifier.addDocument(createDocument(n + i, 6));
modifier.close();
}
@@ -937,7 +937,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public static void createIndex(Random random, Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random,
-TEST_VERSION_CURRENT, new MockAnalyzer())
+TEST_VERSION_CURRENT, new MockAnalyzer(random))
.setMergePolicy(new LogDocMergePolicy()));

for (int i = 0; i < 100; i++) {
@@ -984,7 +984,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: modify index");
}
-IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w.setInfoStream(VERBOSE ? System.out : null);
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
@@ -1001,13 +1001,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 2: {
-IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w.optimize();
w.close();
break;
}
case 3: {
-IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w.addDocument(createDocument(101, 4));
w.optimize();
w.addDocument(createDocument(102, 4));
@@ -1024,7 +1024,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 5: {
-IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
w.addDocument(createDocument(101, 4));
w.close();
break;
@@ -1188,7 +1188,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
-newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
setIndexDeletionPolicy(new KeepAllCommits()).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
@ -86,7 +86,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
try {
|
||||
IndexWriterConfig.setDefaultWriteLockTimeout(2000);
|
||||
assertEquals(2000, IndexWriterConfig.getDefaultWriteLockTimeout());
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
} finally {
|
||||
IndexWriterConfig.setDefaultWriteLockTimeout(savedWriteLockTimeout);
|
||||
}
|
||||
|
@ -110,7 +110,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
reader.close();
|
||||
|
||||
// optimize the index and check that the new doc count is correct
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
assertEquals(60, writer.numDocs());
|
||||
writer.optimize();
|
||||
assertEquals(60, writer.maxDoc());
|
||||
|
@ -125,7 +125,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
// make sure opening a new index for create over
|
||||
// this existing one works correctly:
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
|
||||
assertEquals(0, writer.maxDoc());
|
||||
assertEquals(0, writer.numDocs());
|
||||
writer.close();
|
||||
|
@ -153,7 +153,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
String[] startFiles = dir.listAll();
|
||||
SegmentInfos infos = new SegmentInfos();
|
||||
infos.read(dir);
|
||||
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())).rollback();
|
||||
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))).rollback();
|
||||
String[] endFiles = dir.listAll();
|
||||
|
||||
Arrays.sort(startFiles);
|
||||
|
@ -176,7 +176,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
ldmp.setMinMergeDocs(1);
|
||||
ldmp.setMergeFactor(5);
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
|
||||
ldmp));
|
||||
for(int j=0;j<numDocs;j++)
|
||||
|
@ -190,7 +190,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
ldmp = new LogDocMergePolicy();
|
||||
ldmp.setMergeFactor(5);
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
|
||||
new MockAnalyzer()).setMergePolicy(ldmp));
|
||||
new MockAnalyzer(random)).setMergePolicy(ldmp));
|
||||
writer.optimize(3);
|
||||
writer.close();
|
||||
|
||||
|
@ -216,7 +216,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
ldmp.setMinMergeDocs(1);
|
||||
ldmp.setMergeFactor(4);
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setMaxBufferedDocs(2).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
|
||||
|
||||
for(int iter=0;iter<10;iter++) {
|
||||
|
@ -257,7 +257,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testOptimizeTempSpaceUsage() throws IOException {
|
||||
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
|
||||
if (VERBOSE) {
|
||||
System.out.println("TEST: config1=" + writer.getConfig());
|
||||
}
|
||||
|
@ -290,7 +290,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// Import to use same term index interval else a
|
||||
// smaller one here could increase the disk usage and
|
||||
// cause a false failure:
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setTermIndexInterval(termIndexInterval).setMergePolicy(newLogMergePolicy()));
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
writer.optimize();
|
||||
writer.close();
|
||||
|
@ -318,7 +318,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
|
||||
// add one document & close writer
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
addDoc(writer);
|
||||
writer.close();
|
||||
|
||||
|
@ -327,7 +327,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals("should be one document", reader.numDocs(), 1);
|
||||
|
||||
// now open index for create:
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
|
||||
assertEquals("should be zero documents", writer.maxDoc(), 0);
|
||||
addDoc(writer);
|
||||
writer.close();
|
||||
|
@ -346,7 +346,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter writer = null;
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
addDoc(writer);
|
||||
|
||||
// close
|
||||
|
@ -367,7 +367,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
*/
|
||||
public void testCommitOnClose() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
for (int i = 0; i < 14; i++) {
|
||||
addDoc(writer);
|
||||
}
|
||||
|
@ -381,7 +381,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexReader reader = IndexReader.open(dir, true);
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
for(int i=0;i<3;i++) {
|
||||
for(int j=0;j<11;j++) {
|
||||
addDoc(writer);
|
||||
|
@ -415,7 +415,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
*/
|
||||
public void testCommitOnCloseAbort() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
|
||||
for (int i = 0; i < 14; i++) {
|
||||
addDoc(writer);
|
||||
}
|
||||
|
@ -427,7 +427,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals("first number of hits", 14, hits.length);
|
||||
searcher.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
|
||||
for(int j=0;j<17;j++) {
|
||||
addDoc(writer);
|
||||
|
@ -452,7 +452,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
// Now make sure we can re-open the index, add docs,
|
||||
// and all is good:
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
|
||||
|
||||
// On abort, writer in fact may write to the same
|
||||
|
@ -489,7 +489,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(10).
|
||||
setReaderPooling(false).
|
||||
setMergePolicy(newLogMergePolicy(10))
|
||||
|
@ -504,7 +504,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
long startDiskUsage = dir.getMaxUsedSizeInBytes();
|
||||
writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.APPEND).
|
||||
setMaxBufferedDocs(10).
|
||||
setMergeScheduler(new SerialMergeScheduler()).
|
||||
|
@ -551,7 +551,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
dir.setPreventDoubleWrite(false);
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(10).
|
||||
setMergePolicy(newLogMergePolicy(10))
|
||||
);
|
||||
|
@ -560,7 +560,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
}
|
||||
writer.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
|
||||
writer.optimize();
|
||||
|
||||
if (VERBOSE) {
|
||||
|
@ -589,7 +589,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
if (VERBOSE) {
|
||||
System.out.println("TEST: do real optimize");
|
||||
}
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
|
||||
if (VERBOSE) {
|
||||
writer.setInfoStream(System.out);
|
||||
}
|
||||
|
@ -612,7 +612,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testIndexNoDocuments() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.commit();
|
||||
writer.close();
|
||||
|
||||
|
@ -621,7 +621,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals(0, reader.numDocs());
|
||||
reader.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
|
||||
writer.commit();
|
||||
writer.close();
|
||||
|
||||
|
@ -634,7 +634,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testManyFields() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
|
||||
for(int j=0;j<100;j++) {
|
||||
Document doc = new Document();
|
||||
doc.add(newField("a"+j, "aaa" + j, Field.Store.YES, Field.Index.ANALYZED));
|
||||
|
@ -666,7 +666,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setRAMBufferSizeMB(0.000001).
|
||||
setMergePolicy(newLogMergePolicy(10))
|
||||
);
|
||||
|
@ -689,7 +689,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// maxBufferedDocs in a write session
|
||||
public void testChangingRAMBuffer() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.getConfig().setMaxBufferedDocs(10);
|
||||
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
|
||||
|
||||
|
@ -743,7 +743,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testChangingRAMBuffer2() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.getConfig().setMaxBufferedDocs(10);
|
||||
writer.getConfig().setMaxBufferedDeleteTerms(10);
|
||||
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
|
||||
|
@ -803,7 +803,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testDiverseDocs() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.5));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.5));
|
||||
for(int i=0;i<3;i++) {
|
||||
// First, docs where every term is unique (heavy on
|
||||
// Posting instances)
|
||||
|
@ -850,7 +850,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testEnablingNorms() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(10));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(10));
|
||||
// Enable norms for only 1 doc, pre flush
|
||||
for(int j=0;j<10;j++) {
|
||||
Document doc = new Document();
|
||||
|
@ -870,7 +870,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals(10, hits.length);
|
||||
searcher.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
|
||||
// Enable norms for only 1 doc, post flush
|
||||
for(int j=0;j<27;j++) {
|
||||
|
@ -897,7 +897,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testHighFreqTerm() throws IOException {
|
||||
MockDirectoryWrapper dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01));
|
||||
// Massive doc that has 128 K a's
|
||||
StringBuilder b = new StringBuilder(1024*1024);
|
||||
for(int i=0;i<4096;i++) {
|
||||
|
@ -948,7 +948,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
Directory dir = new MyRAMDirectory(new RAMDirectory());
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
for (int i = 0; i < 100; i++) {
|
||||
addDoc(writer);
|
||||
}
|
||||
|
@ -959,7 +959,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals("did not get right number of hits", 100, hits.length);
|
||||
searcher.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setOpenMode(OpenMode.CREATE));
|
||||
writer.close();
|
||||
searcher.close();
|
||||
|
@ -970,7 +970,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy(10))
|
||||
);
|
||||
|
@ -992,7 +992,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// empty doc (no norms) and flush
|
||||
public void testEmptyDocAfterFlushingRealDoc() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
Document doc = new Document();
|
||||
doc.add(newField("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
|
||||
|
@ -1019,7 +1019,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
for(int pass=0;pass<2;pass++) {
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setOpenMode(OpenMode.CREATE).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy(101))
|
||||
|
@ -1065,7 +1065,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testBadSegment() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
|
||||
Document document = new Document();
|
||||
document.add(newField("tvtest", "", Store.NO, Index.ANALYZED, TermVector.YES));
|
||||
|
@ -1080,7 +1080,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
try {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig conf = newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
|
||||
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
|
||||
IndexWriter iw = new IndexWriter(dir, conf);
|
||||
|
@ -1122,7 +1122,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testSetMaxMergeDocs() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig conf = newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
|
||||
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
|
||||
lmp.setMaxMergeDocs(20);
|
||||
|
@ -1147,7 +1147,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
if (VERBOSE) {
|
||||
System.out.println("TEST: iter=" + i);
|
||||
}
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
//LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
|
||||
//lmp.setMergeFactor(2);
|
||||
|
@ -1184,7 +1184,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
reader.close();
|
||||
|
||||
if (0 == i % 4) {
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
//LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
|
||||
//lmp2.setUseCompoundFile(false);
|
||||
writer.optimize();
|
||||
|
@ -1208,7 +1208,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter writer = new IndexWriter(
|
||||
directory,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setOpenMode(OpenMode.CREATE).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy())
|
||||
|
@ -1277,7 +1277,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
reader.close();
|
||||
|
||||
// Reopen
|
||||
writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
|
||||
writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
}
|
||||
writer.close();
|
||||
|
@ -1291,7 +1291,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testUnlimitedMaxFieldLength() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
|
||||
Document doc = new Document();
|
||||
StringBuilder b = new StringBuilder();
|
||||
|
@ -1315,7 +1315,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy(5))
|
||||
);
|
||||
|
@ -1353,7 +1353,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testExpungeDeletes() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
|
||||
IndexWriterConfig.DISABLE_AUTO_FLUSH));
|
||||
writer.setInfoStream(VERBOSE ? System.out : null);
|
||||
|
@ -1379,7 +1379,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals(8, ir.numDocs());
|
||||
ir.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
|
||||
assertEquals(8, writer.numDocs());
|
||||
assertEquals(10, writer.maxDoc());
|
||||
writer.expungeDeletes();
|
||||
|
@ -1397,7 +1397,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
|
||||
setMergePolicy(newLogMergePolicy(50))
|
||||
|
@ -1427,7 +1427,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMergePolicy(newLogMergePolicy(3))
|
||||
);
|
||||
assertEquals(49, writer.numDocs());
|
||||
|
@ -1446,7 +1446,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
|
||||
setMergePolicy(newLogMergePolicy(50))
|
||||
|
@ -1476,7 +1476,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMergePolicy(newLogMergePolicy(3))
|
||||
);
|
||||
writer.expungeDeletes(false);
|
||||
|
@ -1491,7 +1491,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1179
|
||||
public void testEmptyFieldName() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("", "a b c", Field.Store.NO, Field.Index.ANALYZED));
|
||||
writer.addDocument(doc);
|
||||
|
@ -1525,7 +1525,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1222
|
||||
public void testDoBeforeAfterFlush() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("field", "a field", Field.Store.YES,
|
||||
Field.Index.ANALYZED));
|
||||
|
@ -1579,7 +1579,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-510
|
||||
public void testInvalidUTF16() throws Throwable {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
|
||||
final int count = utf8Data.length/2;
|
||||
|
@ -1789,7 +1789,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
};
|
||||
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", tokens));
|
||||
w.addDocument(doc);
|
||||
|
@ -1828,7 +1828,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy(5))
|
||||
);
|
||||
|
@ -1884,7 +1884,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter writer = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMaxBufferedDocs(2).
|
||||
setMergePolicy(newLogMergePolicy(5))
|
||||
);
|
||||
|
@ -1910,7 +1910,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
reader.close();
|
||||
reader2.close();
|
||||
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
for (int i = 0; i < 17; i++)
|
||||
addDoc(writer);
|
||||
|
||||
|
@ -1938,7 +1938,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testPrepareCommitNoChanges() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer.prepareCommit();
|
||||
writer.commit();
|
||||
writer.close();
|
||||
|
@ -1952,7 +1952,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1219
|
||||
public void testBinaryFieldOffsetLength() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
byte[] b = new byte[50];
|
||||
for(int i=0;i<50;i++)
|
||||
b[i] = (byte) (i+77);
|
||||
|
@ -1982,7 +1982,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1382
|
||||
public void testCommitUserData() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
|
||||
for(int j=0;j<17;j++)
|
||||
addDoc(w);
|
||||
w.close();
|
||||
|
@ -1994,7 +1994,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals(0, r.getCommitUserData().size());
|
||||
r.close();
|
||||
|
||||
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
|
||||
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
|
||||
for(int j=0;j<17;j++)
|
||||
addDoc(w);
|
||||
Map<String,String> data = new HashMap<String,String>();
|
||||
|
@ -2008,7 +2008,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals("test1", r.getCommitUserData().get("label"));
|
||||
r.close();
|
||||
|
||||
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
w.optimize();
|
||||
w.close();
|
||||
|
||||
|
@ -2021,7 +2021,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-2529
|
||||
public void testPositionIncrementGapEmptyField() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
MockAnalyzer analyzer = new MockAnalyzer();
|
||||
MockAnalyzer analyzer = new MockAnalyzer(random);
|
||||
analyzer.setPositionIncrementGap( 100 );
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, analyzer));
|
||||
|
@ -2059,7 +2059,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
out.writeByte((byte) 42);
|
||||
out.close();
|
||||
|
||||
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
|
||||
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
|
||||
|
||||
assertTrue(dir.fileExists("myrandomfile"));
|
||||
} finally {
|
||||
|
@ -2069,7 +2069,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testDeadlock() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
|
||||
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
|
||||
|
@ -2080,7 +2080,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// index has 2 segments
|
||||
|
||||
Directory dir2 = newDirectory();
|
||||
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
writer2.addDocument(doc);
|
||||
writer2.close();
|
||||
|
||||
|
@ -2119,7 +2119,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
w.close();
|
||||
}
|
||||
IndexWriterConfig conf = newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2);
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2);
|
||||
w = new IndexWriter(dir, conf);
|
||||
|
||||
Document doc = new Document();
|
||||
|
@ -2224,7 +2224,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testIndexStoreCombos() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
byte[] b = new byte[50];
|
||||
for(int i=0;i<50;i++)
|
||||
b[i] = (byte) (i+77);
|
||||
|
@ -2287,7 +2287,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1727: make sure doc fields are stored in order
|
||||
public void testStoredFieldsOrder() throws Throwable {
|
||||
Directory d = newDirectory();
|
||||
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("zzz", "a b c", Field.Store.YES, Field.Index.NO));
|
||||
doc.add(newField("aaa", "a b c", Field.Store.YES, Field.Index.NO));
|
||||
|
@ -2319,7 +2319,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testEmbeddedFFFF() throws Throwable {
|
||||
|
||||
Directory d = newDirectory();
|
||||
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
Document doc = new Document();
|
||||
doc.add(newField("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
|
||||
w.addDocument(doc);
|
||||
|
@ -2337,7 +2337,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testNoDocsIndex() throws Throwable {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
|
||||
writer.setInfoStream(new PrintStream(bos));
|
||||
writer.addDocument(new Document());
|
||||
|
@ -2355,7 +2355,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
final double RUN_SEC = 0.5;
|
||||
final Directory dir = newDirectory();
|
||||
final RandomIndexWriter w = new RandomIndexWriter(random, dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
|
||||
_TestUtil.reduceOpenFiles(w.w);
|
||||
w.commit();
|
||||
final AtomicBoolean failed = new AtomicBoolean();
|
||||
|
@ -2527,7 +2527,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testIndexDivisor() throws Exception {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
|
||||
IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
|
||||
config.setTermIndexInterval(2);
|
||||
IndexWriter w = new IndexWriter(dir, config);
|
||||
StringBuilder s = new StringBuilder();
|
||||
|
@ -2564,7 +2564,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
IndexWriter w = new IndexWriter(
|
||||
dir,
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
|
||||
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
|
||||
setMergePolicy(mergePolicy)
|
||||
);
|
||||
Document doc = new Document();
|
||||
|
@ -2626,7 +2626,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
Directory dir = newDirectory();
|
||||
SnapshotDeletionPolicy sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer())
|
||||
TEST_VERSION_CURRENT, new MockAnalyzer(random))
|
||||
.setIndexDeletionPolicy(sdp));
|
||||
|
||||
// First commit
|
||||
|
@ -2670,7 +2670,14 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
final Random r = random;
|
||||
|
||||
Directory dir = newDirectory();
|
||||
FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, false)).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1));
|
||||
// note this test explicitly disables payloads
|
||||
final Analyzer analyzer = new Analyzer() {
|
||||
@Override
|
||||
public TokenStream tokenStream(String fieldName, Reader reader) {
|
||||
return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
|
||||
}
|
||||
};
|
||||
FlushCountingIndexWriter w = new FlushCountingIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(-1).setMaxBufferedDeleteTerms(-1));
|
||||
w.setInfoStream(VERBOSE ? System.out : null);
|
||||
Document doc = new Document();
|
||||
doc.add(newField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO, Field.Index.ANALYZED));
|
||||
|
@ -2708,7 +2715,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// changed since LUCENE-2386, where before IW would always commit on a fresh
|
||||
// new index.
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
|
||||
try {
|
||||
IndexReader.listCommits(dir);
|
||||
fail("listCommits should have thrown an exception over empty index");
|
||||
|
@ -2726,7 +2733,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
|
||||
// when listAll() was called in IndexFileDeleter.
|
||||
Directory dir = newFSDirectory(new File(TEMP_DIR, "emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
|
||||
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())).close();
|
||||
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))).close();
|
||||
dir.close();
|
||||
}
|
||||
|
||||
|
@ -2736,7 +2743,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// files are left in the Directory.
|
||||
Directory dir = newDirectory();
|
||||
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
     String[] files = dir.listAll();

@@ -2781,14 +2788,14 @@ public class TestIndexWriter extends LuceneTestCase {
     Directory dir = newDirectory();
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     doc.add(newField("c", "val", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     w.addDocument(doc);
     w.addDocument(doc);
     IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2)
         .setOpenMode(OpenMode.CREATE));
     w2.close();

@@ -2800,7 +2807,7 @@ public class TestIndexWriter extends LuceneTestCase {
   public void testFutureCommit() throws Exception {
     Directory dir = newDirectory();

-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
     Document doc = new Document();
     w.addDocument(doc);

@@ -2826,7 +2833,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertNotNull(commit);

-    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
+    w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));

     assertEquals(1, w.numDocs());

@@ -2876,7 +2883,7 @@ public class TestIndexWriter extends LuceneTestCase {
   public void testRandomStoredFields() throws IOException {
     Directory dir = newDirectory();
     Random rand = random;
-    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
+    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(_TestUtil.nextInt(rand, 5, 20)));
     //w.w.setInfoStream(System.out);
     //w.w.setUseCompoundFile(false);
     if (VERBOSE) {

@@ -2972,7 +2979,7 @@ public class TestIndexWriter extends LuceneTestCase {
   public void testNoUnwantedTVFiles() throws Exception {

     Directory dir = newDirectory();
-    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
+    IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
     ((LogMergePolicy) indexWriter.getConfig().getMergePolicy()).setUseCompoundFile(false);

     String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
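Every hunk above and below follows the same mechanical pattern: a zero-argument `new MockAnalyzer()` becomes `new MockAnalyzer(random)`, where `random` is the per-test `Random` that `LuceneTestCase` provides. Seeding the mock analyzer makes any randomized behavior inside it (such as whether it injects payloads) reproducible from the test seed. A minimal sketch of the resulting shape — `newDirectory`, `newIndexWriterConfig`, `TEST_VERSION_CURRENT`, and `random` are the `LuceneTestCase` members used throughout this diff, but the test class itself is illustrative only, not part of the commit:

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class SeededMockAnalyzerSketch extends LuceneTestCase {
  public void testSeededAnalyzer() throws Exception {
    Directory dir = newDirectory();
    // The analyzer shares the test's per-run Random, so a failing run can be
    // replayed exactly by re-running with the same seed.
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    writer.addDocument(new Document());
    writer.close();
    dir.close();
  }
}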
@@ -49,7 +49,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {

   @Test
   public void testDefaults() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass());
     assertNull(conf.getIndexCommit());
     assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());

@@ -129,7 +129,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {

   @Test
   public void testToString() throws Exception {
-    String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).toString();
+    String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).toString();
     for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
       int modifiers = f.getModifiers();
       if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {

@@ -146,7 +146,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {

   @Test
   public void testClone() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     IndexWriterConfig clone = (IndexWriterConfig) conf.clone();

     // Clone is shallow since not all parameters are cloneable.

@@ -158,7 +158,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {

   @Test
   public void testInvalidValues() throws Exception {
-    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));

     // Test IndexDeletionPolicy
     assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
@@ -44,7 +44,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));

     for (int i = 0; i < keywords.length; i++) {
       Document doc = new Document();

@@ -79,7 +79,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));
     modifier.setInfoStream(VERBOSE ? System.out : null);
     int id = 0;

@@ -113,7 +113,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testMaxBufferedDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));

     writer.setInfoStream(VERBOSE ? System.out : null);
     writer.addDocument(new Document());

@@ -133,7 +133,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     }
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
         .setMaxBufferedDeleteTerms(4));
     modifier.setInfoStream(VERBOSE ? System.out : null);
     int id = 0;

@@ -172,7 +172,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testBothDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
         .setMaxBufferedDeleteTerms(100));

     int id = 0;

@@ -206,7 +206,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testBatchDeletes() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));

     int id = 0;

@@ -249,7 +249,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAll() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));

     int id = 0;

@@ -295,7 +295,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAllRollback() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));

     int id = 0;

@@ -332,7 +332,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAllNRT() throws IOException {
     Directory dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
         .setMaxBufferedDeleteTerms(2));

     int id = 0;

@@ -423,7 +423,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     MockDirectoryWrapper startDir = newDirectory();
     // TODO: find the resource leak that only occurs sometimes here.
     startDir.setNoDeleteOpenFile(false);
-    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     for (int i = 0; i < 157; i++) {
       Document d = new Document();
       d.add(newField("id", Integer.toString(i), Field.Store.YES,

@@ -450,7 +450,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     dir.setPreventDoubleWrite(false);
     IndexWriter modifier = new IndexWriter(dir,
         newIndexWriterConfig(
-            TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false))
+            TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
             .setMaxBufferedDocs(1000)
             .setMaxBufferedDeleteTerms(1000)
             .setMergeScheduler(new ConcurrentMergeScheduler()));

@@ -682,7 +682,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     MockDirectoryWrapper dir = newDirectory();
     IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
     modifier.setInfoStream(VERBOSE ? System.out : null);

     LogMergePolicy lmp = (LogMergePolicy) modifier.getConfig().getMergePolicy();

@@ -809,7 +809,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     String[] text = { "Amsterdam", "Venice" };

     MockDirectoryWrapper dir = newDirectory();
-    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     modifier.commit();
     dir.failOn(failure.reset());

@@ -837,7 +837,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

   public void testDeleteNullQuery() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));

     for (int i = 0; i < 5; i++) {
       addDoc(modifier, i, 2*i);
@@ -155,7 +155,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     }
     MockDirectoryWrapper dir = newDirectory();

-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);

@@ -201,7 +201,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

   public void testRandomExceptionsThreads() throws Throwable {
     MockDirectoryWrapper dir = newDirectory();
-    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
     ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);

@@ -289,7 +289,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

   public void testExceptionDocumentsWriterInit() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    MockIndexWriter2 w = new MockIndexWriter2(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,

@@ -310,7 +310,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // LUCENE-1208
   public void testExceptionJustBeforeFlush() throws IOException {
     Directory dir = newDirectory();
-    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     w.setInfoStream(VERBOSE ? System.out : null);
     Document doc = new Document();
     doc.add(newField("field", "a field", Field.Store.YES,

@@ -361,7 +361,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // LUCENE-1210
   public void testExceptionOnMergeInit() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
     MockIndexWriter3 w = new MockIndexWriter3(dir, conf);

@@ -494,7 +494,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     failure.setDoFail();
     dir.failOn(failure);

-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
     Document doc = new Document();
     String contents = "aa bb cc dd ee ff gg hh ii jj kk";
     doc.add(newField("content", contents, Field.Store.NO,

@@ -765,7 +765,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMaxBufferedDocs(2).
             setMergeScheduler(new ConcurrentMergeScheduler()).
             setMergePolicy(newLogMergePolicy(5))

@@ -847,7 +847,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     for (FailOnlyInCommit failure : failures) {
       MockDirectoryWrapper dir = newDirectory();
       IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document doc = new Document();
       doc.add(newField("field", "a field", Field.Store.YES,
           Field.Index.ANALYZED));

@@ -872,7 +872,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

   public void testOptimizeExceptions() throws IOException {
     Directory startDir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
     ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
     IndexWriter w = new IndexWriter(startDir, conf);
     for(int i=0;i<27;i++)

@@ -884,7 +884,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
         System.out.println("TEST: iter " + i);
       }
       MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
-      conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler());
+      conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler());
       ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
       w = new IndexWriter(dir, conf);
       w.setInfoStream(VERBOSE ? System.out : null);

@@ -908,7 +908,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     final List<Throwable> thrown = new ArrayList<Throwable>();
     final Directory dir = newDirectory();
     final IndexWriter writer = new IndexWriter(dir,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())) {
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))) {
         @Override
         public void message(final String message) {
           if (message.startsWith("now flush at close") && 0 == thrown.size()) {

@@ -951,7 +951,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   // LUCENE-1347
   public void testRollbackExceptionHang() throws Throwable {
     Directory dir = newDirectory();
-    MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    MockIndexWriter4 w = new MockIndexWriter4(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     addDoc(w);
     w.doFail = true;

@@ -973,7 +973,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

     IndexWriter writer = null;

-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     // add 100 documents
     for (int i = 0; i < 100; i++) {

@@ -1015,7 +1015,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

     IndexWriter writer = null;

-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     // add 100 documents
     for (int i = 0; i < 100; i++) {

@@ -1064,7 +1064,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

     writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(true))
     );
     ((LogMergePolicy) writer.getConfig().getMergePolicy()).setNoCFSRatio(1.0);

@@ -1113,7 +1113,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

     IndexWriter writer = null;

-    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     // add 100 documents
     for (int i = 0; i < 100; i++) {

@@ -1151,7 +1151,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     reader.close();

     try {
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.CREATE));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
     } catch (Exception e) {
       e.printStackTrace(System.out);
       fail("writer failed to open on a crashed index");
@@ -75,10 +75,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
   public void testIndexWriterLockRelease() throws IOException {
     Directory dir = newFSDirectory(this.__test_dir);
     try {
-      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     } catch (FileNotFoundException e) {
       try {
-        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       } catch (FileNotFoundException e1) {
       }
     } finally {
@@ -34,7 +34,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     Directory dir = newDirectory();

     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));

     for (int i = 0; i < 100; i++) {

@@ -51,7 +51,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     Directory dir = newDirectory();

     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));

     boolean noOverMerge = false;

@@ -76,7 +76,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     mp.setMinMergeDocs(100);
     mp.setMergeFactor(10);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(10).setMergePolicy(mp));

     for (int i = 0; i < 100; i++) {

@@ -86,7 +86,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     mp = new LogDocMergePolicy();
     mp.setMergeFactor(10);
     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(mp));
     mp.setMinMergeDocs(100);
     checkInvariants(writer);

@@ -102,7 +102,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {

     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy())
     );

@@ -130,7 +130,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     Directory dir = newDirectory();

     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
         .setMergeScheduler(new SerialMergeScheduler()));

@@ -144,7 +144,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
       writer.close();

       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-          new MockAnalyzer()).setOpenMode(
+          new MockAnalyzer(random)).setOpenMode(
           OpenMode.APPEND).setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
           .setMergeScheduler(new SerialMergeScheduler()));
     }

@@ -153,7 +153,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     LogDocMergePolicy ldmp = new LogDocMergePolicy();
     ldmp.setMergeFactor(10);
     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new SerialMergeScheduler()));

     // merge policy only fixes segments on levels where merges

@@ -182,7 +182,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     LogDocMergePolicy ldmp = new LogDocMergePolicy();
     ldmp.setMergeFactor(100);
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
        .setMaxBufferedDocs(10).setMergePolicy(ldmp));

     for (int i = 0; i < 250; i++) {

@@ -198,7 +198,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     ldmp = new LogDocMergePolicy();
     ldmp.setMergeFactor(5);
     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(
+        new MockAnalyzer(random)).setOpenMode(
         OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));

     // merge factor is changed, so check invariants after all adds
@@ -57,7 +57,7 @@ public class TestIndexWriterMerging extends LuceneTestCase

     IndexWriter writer = new IndexWriter(
         merged,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(2))
     );
     writer.setInfoStream(VERBOSE ? System.out : null);

@@ -98,7 +98,7 @@ public class TestIndexWriterMerging extends LuceneTestCase

     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setOpenMode(OpenMode.CREATE).
            setMaxBufferedDocs(2).
            setMergePolicy(newLogMergePolicy(2))
@@ -58,7 +58,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
       }
       MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
       dir.setMaxSizeInBytes(diskFree);
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       writer.setInfoStream(VERBOSE ? System.out : null);
       MergeScheduler ms = writer.getConfig().getMergeScheduler();
       if (ms instanceof ConcurrentMergeScheduler) {

@@ -152,7 +152,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
     long inputDiskUsage = 0;
     for(int i=0;i<NUM_DIR;i++) {
       dirs[i] = newDirectory();
-      IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       for(int j=0;j<25;j++) {
         addDocWithIndex(writer, 25*i+j);
       }

@@ -166,7 +166,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
     // Now, build a starting index that has START_COUNT docs. We
     // will then try to addIndexesNoOptimize into a copy of this:
     MockDirectoryWrapper startDir = newDirectory();
-    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for(int j=0;j<START_COUNT;j++) {
       addDocWithIndex(writer, j);
     }

@@ -232,7 +232,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {

       // Make a new dir that will enforce disk usage:
       MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
-      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
+      writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
       IOException err = null;
       writer.setInfoStream(VERBOSE ? System.out : null);

@@ -456,10 +456,10 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
   // LUCENE-2593
   public void testCorruptionAfterDiskFullDuringMerge() throws IOException {
     MockDirectoryWrapper dir = newDirectory();
-    //IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderPooling(true));
+    //IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergeScheduler(new SerialMergeScheduler()).
            setReaderPooling(true).
            setMergePolicy(newLogMergePolicy(2))

@@ -500,7 +500,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
   // OK:
   public void testImmediateDiskFull() throws IOException {
     MockDirectoryWrapper dir = newDirectory();
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
     dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes()));
     final Document doc = new Document();
@@ -65,7 +65,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testAddCloseOpen() throws IOException {
     Directory dir1 = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));

     IndexWriter writer = new IndexWriter(dir1, iwc);
     for (int i = 0; i < 97 ; i++) {

@@ -99,7 +99,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     assertTrue(reader.isCurrent());
     writer.close();
     assertTrue(reader.isCurrent()); // all changes are visible to the reader
-    iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     writer = new IndexWriter(dir1, iwc);
     assertTrue(reader.isCurrent());
     writer.addDocument(createDocument(1, "x", 1+random.nextInt(5)));

@@ -114,7 +114,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = true;

     Directory dir1 = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     if (iwc.getMaxBufferedDocs() < 20) {
       iwc.setMaxBufferedDocs(20);
     }

@@ -165,7 +165,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     assertEquals(0, count(new Term("id", id10), r3));
     assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));

-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     writer.addDocument(doc);

@@ -185,7 +185,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testIsCurrent() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));

     IndexWriter writer = new IndexWriter(dir, iwc);
     Document doc = new Document();

@@ -193,7 +193,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.addDocument(doc);
     writer.close();

-    iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     writer = new IndexWriter(dir, iwc);
     doc = new Document();
     doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));

@@ -230,7 +230,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = false;

     Directory dir1 = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     if (iwc.getMaxBufferedDocs() < 20) {
       iwc.setMaxBufferedDocs(20);
     }

@@ -249,7 +249,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

     // create a 2nd index
     Directory dir2 = newDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
     writer2.close();

@@ -287,12 +287,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = false;

     Directory dir1 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.setInfoStream(infoStream);

     // create a 2nd index
     Directory dir2 = newDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
     writer2.close();

@@ -321,7 +321,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = true;

     Directory dir1 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setReaderTermsIndexDivisor(2));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderTermsIndexDivisor(2));
     writer.setInfoStream(infoStream);
     // create the index
     createIndexNoClose(!optimize, "index1", writer);

@@ -359,7 +359,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.close();

     // reopen the writer to verify the delete made it to the directory
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.setInfoStream(infoStream);
     IndexReader w2r1 = writer.getReader();
     assertEquals(0, count(new Term("id", id10), w2r1));

@@ -373,7 +373,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     int numDirs = 3;

     Directory mainDir = newDirectory();
-    IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     _TestUtil.reduceOpenFiles(mainWriter);

     mainWriter.setInfoStream(infoStream);

@@ -418,7 +418,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
       this.numDirs = numDirs;
       this.mainWriter = mainWriter;
       addDir = newDirectory();
-      IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2));
+      IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2));
       for (int i = 0; i < NUM_INIT_DOCS; i++) {
         Document doc = createDocument(i, "addindex", 4);
         writer.addDocument(doc);

@@ -527,7 +527,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   */
   public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
     Directory dir1 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.setInfoStream(infoStream);
     IndexReader r1 = writer.getReader();
     assertEquals(0, r1.maxDoc());

@@ -564,7 +564,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.close();

     // test whether the changes made it to the directory
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     IndexReader w2r1 = writer.getReader();
     // insure the deletes were actually flushed to the directory
     assertEquals(200, w2r1.maxDoc());

@@ -604,7 +604,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   public static void createIndex(Random random, Directory dir1, String indexName,
       boolean multiSegment) throws IOException {
     IndexWriter w = new IndexWriter(dir1, LuceneTestCase.newIndexWriterConfig(random,
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMergePolicy(new LogDocMergePolicy()));
     for (int i = 0; i < 100; i++) {
       w.addDocument(createDocument(i, indexName, 4));

@@ -642,7 +642,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     MyWarmer warmer = new MyWarmer();
     IndexWriter writer = new IndexWriter(
         dir1,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergedSegmentWarmer(warmer).
            setMergeScheduler(new ConcurrentMergeScheduler()).

@@ -678,7 +678,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testAfterCommit() throws Exception {
     Directory dir1 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergeScheduler(new ConcurrentMergeScheduler()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler()));
     writer.commit();
     writer.setInfoStream(infoStream);

@@ -711,7 +711,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   // Make sure reader remains usable even if IndexWriter closes
   public void testAfterClose() throws Exception {
     Directory dir1 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     writer.setInfoStream(infoStream);

     // create the index

@@ -743,7 +743,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     MockDirectoryWrapper dir1 = newDirectory();
     final IndexWriter writer = new IndexWriter(
         dir1,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(2))
     );
     writer.setInfoStream(infoStream);

@@ -830,7 +830,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     Directory dir1 = newDirectory();
     final IndexWriter writer = new IndexWriter(
         dir1,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMergePolicy(newLogMergePolicy(2))
     );
     writer.setInfoStream(infoStream);

@@ -915,7 +915,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testExpungeDeletes() throws Throwable {
     Directory dir = newDirectory();
-    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
     Document doc = new Document();
     doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     Field id = newField("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

@@ -939,7 +939,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testDeletesNumDocs() throws Throwable {
     Directory dir = newDirectory();
-    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document doc = new Document();
     doc.add(newField("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
     Field id = newField("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

@@ -969,7 +969,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   public void testEmptyIndex() throws Exception {
     // Ensures that getReader works on an empty index, which hasn't been committed yet.
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     IndexReader r = w.getReader();
     assertEquals(0, r.numDocs());
     r.close();

@@ -982,7 +982,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     final AtomicBoolean didWarm = new AtomicBoolean();
     IndexWriter w = new IndexWriter(
         dir,
-        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setReaderPooling(true).
            setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {

@@ -1017,7 +1017,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     illegalCodecs.add("SimpleText");

     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setReaderTermsIndexDivisor(-1);
+        new MockAnalyzer(random)).setReaderTermsIndexDivisor(-1);
     // Don't proceed if picked Codec is in the list of illegal ones.
     if (illegalCodecs.contains(conf.getCodecProvider().getFieldCodec("f"))) return;
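One variation worth noting in TestIndexWriterReader above: `createIndex` is static, so it cannot read the test instance's `random` field implicitly; the hunk at @@ -604 instead threads the caller's `Random` through both the `LuceneTestCase.newIndexWriterConfig(random, ...)` overload and the `MockAnalyzer(random)` constructor. A hedged sketch of that shape — the signature here is simplified from the real helper, and the body is reduced to the relevant calls:

import java.io.IOException;
import java.util.Random;

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;

public class StaticHelperSketch {
  // A static helper has no per-instance random of its own, so the caller's
  // Random is passed in and reused for both the config and the analyzer.
  public static void createIndex(Random random, Directory dir) throws IOException {
    IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random,
        LuceneTestCase.TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    w.close();
  }
}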
@@ -112,7 +112,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
     MockDirectoryWrapper dir = newDirectory();
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergeScheduler(new ConcurrentMergeScheduler()).
            setMergePolicy(newLogMergePolicy(4))

@@ -157,7 +157,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {

     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(10).
            setMergeScheduler(new ConcurrentMergeScheduler()).
            setMergePolicy(newLogMergePolicy(4))

@@ -222,7 +222,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {

     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(2).
            setMergeScheduler(new ConcurrentMergeScheduler()).
            setMergePolicy(newLogMergePolicy(4))

@@ -277,7 +277,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
   public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
     MockDirectoryWrapper dir = newDirectory();

-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer())
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
     final Document doc = new Document();
     doc.add(newField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -63,7 +63,7 @@ public class TestLazyBug extends LuceneTestCase {
     Directory dir = newDirectory();
     try {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy()));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
       LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
       lmp.setUseCompoundFile(false);
@@ -18,9 +18,12 @@ package org.apache.lucene.index;
  */

 import java.io.IOException;
+import java.io.Reader;

+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.document.Field;

@@ -67,10 +70,17 @@ public class TestLazyProxSkipping extends LuceneTestCase {
   private void createIndex(int numHits) throws IOException {
     int numDocs = 500;

+    final Analyzer analyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
+      }
+    };
     Directory directory = new SeekCountingDirectory(new RAMDirectory());
+    // note: test explicitly disables payloads
     IndexWriter writer = new IndexWriter(
         directory,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, true, false)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(false))
     );

@@ -133,7 +143,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {

   public void testSeek() throws IOException {
     Directory directory = newDirectory();
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     for (int i = 0; i < 10; i++) {
       Document doc = new Document();
       doc.add(newField(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));
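TestLazyProxSkipping is the one file in this section where the fix is not simply passing `random` through: because the test counts low-level seeks, it cannot tolerate a mock analyzer that may or may not inject payloads, so the diff replaces the old `MockAnalyzer(MockTokenizer.WHITESPACE, true, false)` with an explicit `Analyzer` over a plain `MockTokenizer` ("note: test explicitly disables payloads"). The same analyzer, reassembled from the hunk above as a self-contained named class purely for readability:

import java.io.Reader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;

// Deterministic whitespace tokenization with lowercasing and no randomized
// extras (in particular, no payloads), mirroring the inline analyzer above.
public class PayloadFreeWhitespaceAnalyzer extends Analyzer {
  @Override
  public TokenStream tokenStream(String fieldName, Reader reader) {
    return new MockTokenizer(reader, MockTokenizer.WHITESPACE, true);
  }
}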
@@ -39,7 +39,7 @@ public class TestLongPostings extends LuceneTestCase {
   // Produces a realistic unicode random string that
   // survives MockAnalyzer unchanged:
   private String getRandomTerm(String other) throws IOException {
-    Analyzer a = new MockAnalyzer();
+    Analyzer a = new MockAnalyzer(random);
     while(true) {
       String s = _TestUtil.randomRealisticUnicodeString(random);
       if (other != null && s.equals(other)) {

@@ -99,7 +99,7 @@ public class TestLongPostings extends LuceneTestCase {

     final IndexReader r;
     if (true) {
-      final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+      final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
          .setMergePolicy(newLogMergePolicy());
       iwc.setRAMBufferSizeMB(16.0 + 16.0 * random.nextDouble());
@@ -47,7 +47,7 @@ public class TestMaxTermFrequency extends LuceneTestCase {
     super.setUp();
     dir = newDirectory();
     IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy());
+        new MockAnalyzer(random, MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy());
     config.setSimilarityProvider(new DefaultSimilarityProvider() {
       @Override
       public Similarity get(String field) {
@@ -31,7 +31,7 @@ public class TestMultiFields extends LuceneTestCase {
     for (int iter = 0; iter < num; iter++) {
       Directory dir = newDirectory();

-      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
       _TestUtil.keepFullyDeletedSegments(w);

       Map<BytesRef,List<Integer>> docs = new HashMap<BytesRef,List<Integer>>();

@@ -134,7 +134,7 @@ public class TestMultiFields extends LuceneTestCase {

   public void testSeparateEnums() throws Exception {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d = new Document();
     d.add(newField("f", "j", Field.Store.NO, Field.Index.NOT_ANALYZED));
     w.addDocument(d);
@@ -32,7 +32,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
     Directory mainDir = newDirectory();
     IndexWriter writer = new IndexWriter(
         mainDir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(false,2))
     );
@@ -70,7 +70,7 @@ public class TestNRTThreads extends LuceneTestCase {
     final LineFileDocs docs = new LineFileDocs(random);
     final File tempDir = _TestUtil.getTempDir("nrtopenfiles");
     final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir));
-    final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     conf.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
       @Override
       public void warm(IndexReader reader) throws IOException {
@@ -24,7 +24,7 @@ import org.apache.lucene.util.LuceneTestCase;
 public class TestNewestSegment extends LuceneTestCase {
   public void testNewestSegment() throws Exception {
     Directory directory = newDirectory();
-    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     assertNull(writer.newestSegment());
     writer.close();
     directory.close();
@@ -70,7 +70,7 @@ public class TestNoDeletionPolicy extends LuceneTestCase {
   public void testAllCommitsRemain() throws Exception {
     Directory dir = newDirectory();
     IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer())
+        TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
     for (int i = 0; i < 10; i++) {
       Document doc = new Document();
@@ -68,7 +68,7 @@ public class TestNorms extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     similarityProviderOne = new SimilarityProviderOne();
-    anlzr = new MockAnalyzer();
+    anlzr = new MockAnalyzer(random);
   }

   /**

@@ -266,7 +266,7 @@ public class TestNorms extends LuceneTestCase {
   // LUCENE-1260
   public void testCustomEncoder() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer());
+    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
     config.setSimilarityProvider(new DefaultSimilarityProvider() {
       @Override
       public Similarity get(String field) {
@@ -32,7 +32,7 @@ public class TestOmitNorms extends LuceneTestCase {
   // omitNorms bit in the FieldInfo
   public void testOmitNorms() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
     Document d = new Document();

@@ -79,7 +79,7 @@ public class TestOmitNorms extends LuceneTestCase {
   // omitNorms for the same field works
   public void testMixedMerge() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(
         ram,
         newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).

@@ -137,7 +137,7 @@ public class TestOmitNorms extends LuceneTestCase {
   // field,
   public void testMixedRAM() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(
         ram,
         newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).

@@ -191,7 +191,7 @@ public class TestOmitNorms extends LuceneTestCase {
   // Verifies no *.nrm exists when all fields omit norms:
   public void testNoNrmFile() throws Throwable {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
     writer.setInfoStream(VERBOSE ? System.out : null);

@@ -264,7 +264,7 @@ public class TestOmitNorms extends LuceneTestCase {
   */
   static byte[] getNorms(String field, Field f1, Field f2) throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy());
     RandomIndexWriter riw = new RandomIndexWriter(random, dir, iwc);

     // add f1
@@ -65,7 +65,7 @@ public class TestOmitTf extends LuceneTestCase {
   // omitTermFreqAndPositions bit in the FieldInfo
   public void testOmitTermFreqAndPositions() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
     Document d = new Document();

@@ -112,7 +112,7 @@ public class TestOmitTf extends LuceneTestCase {
   // omitTermFreqAndPositions for the same field works
   public void testMixedMerge() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(
         ram,
         newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
@@ -168,7 +168,7 @@ public class TestOmitTf extends LuceneTestCase {
   // field,
   public void testMixedRAM() throws Exception {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(
         ram,
         newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
@@ -219,7 +219,7 @@ public class TestOmitTf extends LuceneTestCase {
   // Verifies no *.prx exists when all fields omit term freq:
   public void testNoPrxFile() throws Throwable {
     Directory ram = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
     LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
@@ -251,7 +251,7 @@ public class TestOmitTf extends LuceneTestCase {
   // Test scores with one field with Term Freqs and one without, otherwise with equal content
   public void testBasic() throws Exception {
     Directory dir = newDirectory();
-    Analyzer analyzer = new MockAnalyzer();
+    Analyzer analyzer = new MockAnalyzer(random);
     IndexWriter writer = new IndexWriter(
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).

@@ -119,7 +119,7 @@ public class TestParallelReader extends LuceneTestCase {

     // one document only:
     Directory dir2 = newDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d3 = new Document();
     d3.add(newField("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     w2.addDocument(d3);
@@ -175,7 +175,7 @@ public class TestParallelReader extends LuceneTestCase {
     // add another document to ensure that the indexes are not optimized
     IndexWriter modifier = new IndexWriter(
         dir1,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(10))
     );
     Document d = new Document();
@@ -185,7 +185,7 @@ public class TestParallelReader extends LuceneTestCase {

     modifier = new IndexWriter(
         dir2,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
             setMergePolicy(newLogMergePolicy(10))
     );
     d = new Document();
@@ -200,7 +200,7 @@ public class TestParallelReader extends LuceneTestCase {
     assertFalse(pr.isOptimized());
     pr.close();

-    modifier = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     modifier.optimize();
     modifier.close();

@@ -212,7 +212,7 @@ public class TestParallelReader extends LuceneTestCase {
     pr.close();


-    modifier = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    modifier = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     modifier.optimize();
     modifier.close();

@@ -244,7 +244,7 @@ public class TestParallelReader extends LuceneTestCase {
   // Fields 1-4 indexed together:
   private IndexSearcher single(Random random) throws IOException {
     dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d1 = new Document();
     d1.add(newField("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(newField("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -274,7 +274,7 @@ public class TestParallelReader extends LuceneTestCase {

   private Directory getDir1(Random random) throws IOException {
     Directory dir1 = newDirectory();
-    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d1 = new Document();
     d1.add(newField("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(newField("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -289,7 +289,7 @@ public class TestParallelReader extends LuceneTestCase {

   private Directory getDir2(Random random) throws IOException {
     Directory dir2 = newDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     Document d3 = new Document();
     d3.add(newField("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d3.add(newField("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));

@@ -45,14 +45,14 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
   */
   public void testEmptyIndex() throws IOException {
     Directory rd1 = newDirectory();
-    IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     iw.close();

     Directory rd2 = newDirectory(rd1);

     Directory rdOut = newDirectory();

-    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
     pr.add(IndexReader.open(rd2,true));
@@ -76,7 +76,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
   public void testEmptyIndexWithVectors() throws IOException {
     Directory rd1 = newDirectory();
     {
-      IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document doc = new Document();
       doc.add(newField("test", "", Store.NO, Index.ANALYZED,
           TermVector.YES));
@@ -90,14 +90,14 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
       ir.deleteDocument(0);
       ir.close();

-      iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setOpenMode(OpenMode.APPEND));
+      iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
       iw.optimize();
       iw.close();
     }

     Directory rd2 = newDirectory();
     {
-      IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+      IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
       Document doc = new Document();
       iw.addDocument(doc);
       iw.close();
@@ -105,7 +105,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {

     Directory rdOut = newDirectory();

-    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
     pr.add(IndexReader.open(rd2,true));

@@ -38,7 +38,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
     super.setUp();
     Document doc;
     rd1 = newDirectory();
-    IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     doc = new Document();
     doc.add(newField("field1", "the quick brown fox jumps", Store.YES,
@@ -50,7 +50,7 @@ public class TestParallelTermEnum extends LuceneTestCase {

     iw1.close();
     rd2 = newDirectory();
-    IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()));
+    IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));

     doc = new Document();
     doc.add(newField("field0", "", Store.NO, Index.ANALYZED));

@@ -130,7 +130,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
       throws IOException {
     IndexWriter writer = new IndexWriter(
         dir,
-        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)).
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)).
             setMergePolicy(newLogMergePolicy(10))
     );
     TokenStream payloadTS1 = new PayloadTokenStream("p1");
@@ -188,7 +188,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
     for (Directory d : dirs) {
       processors.put(d, new PerTermPayloadProcessor());
     }
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));

     IndexReader[] readers = new IndexReader[dirs.length];
@@ -242,7 +242,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
     // won't get processed.
     Map<Directory, DirPayloadProcessor> processors = new HashMap<Directory, DirPayloadProcessor>();
     processors.put(dir, new PerTermPayloadProcessor());
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(MockTokenizer.WHITESPACE, false)));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     writer.setPayloadProcessorProvider(new PerDirPayloadProcessor(processors));
     writer.optimize();
     writer.close();

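In the three-argument call sites above, only the leading `Random` is new; the tokenizer (`MockTokenizer.WHITESPACE`) and the lowercasing flag (`false`) carry over unchanged. A hedged sketch of the overload layout this implies follows; the signatures and the default values in the one-argument form are assumptions drawn from how the call sites read, not the framework's actual API.

```java
import java.util.Random;

final class OverloadSketch {
  enum Tokenizer { WHITESPACE, SIMPLE, KEYWORD } // stand-in for MockTokenizer constants

  final Random random;      // drives the analyzer's randomized behavior
  final Tokenizer tokenizer;
  final boolean lowerCase;

  // Short form seen in most hunks: new MockAnalyzer(random).
  // The WHITESPACE/lowercase defaults here are assumed.
  OverloadSketch(Random random) {
    this(random, Tokenizer.WHITESPACE, true);
  }

  // Full form seen here: new MockAnalyzer(random, MockTokenizer.WHITESPACE, false).
  OverloadSketch(Random random, Tokenizer tokenizer, boolean lowerCase) {
    this.random = random;
    this.tokenizer = tokenizer;
    this.lowerCase = lowerCase;
  }

  public static void main(String[] args) {
    OverloadSketch a = new OverloadSketch(new Random(0));
    System.out.println(a.tokenizer + " lowerCase=" + a.lowerCase);
  }
}
```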
@@ -479,7 +479,7 @@ public class TestPayloads extends LuceneTestCase {

     Directory dir = newDirectory();
     final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-        TEST_VERSION_CURRENT, new MockAnalyzer()));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     final String field = "test";

     Thread[] ingesters = new Thread[numThreads];
@@ -600,16 +600,16 @@ public class TestPayloads extends LuceneTestCase {
   public void testAcrossFields() throws Exception {
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir,
-        new MockAnalyzer(MockTokenizer.WHITESPACE, true, true));
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
     Document doc = new Document();
     doc.add(new Field("haspayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     doc.add(new Field("hasMaybepayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);
     writer.close();

     writer = new RandomIndexWriter(random, dir,
-        new MockAnalyzer(MockTokenizer.WHITESPACE, true, false));
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
     doc = new Document();
     doc.add(new Field("nopayload", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     doc.add(new Field("hasMaybepayload2", "here we go", Field.Store.YES, Field.Index.ANALYZED));
     writer.addDocument(doc);
     writer.addDocument(doc);
     writer.optimize();

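The `testAcrossFields` hunk is the only place where the change is more than argument threading: the removed lines show an old third argument that switched payload injection on (`true`) or off (`false`), and both writers now pass the same two-argument tail after `random`. Reading the field names (`hasMaybepayload`, `hasMaybepayload2`) together with the commit message, the apparent intent is that payload injection becomes a per-field, seed-driven decision rather than a hard-wired flag. A plain-JDK simulation of that interpretation, with everything beyond the visible field names hypothetical:

```java
import java.util.Random;

// Simulation: one seed-driven policy decides payloads per field, replacing
// two analyzers hard-wired to payloads=true and payloads=false.
public class AcrossFieldsSketch {
  public static void main(String[] args) {
    long seed = new Random(42L).nextLong(); // any fixed seed makes runs repeatable
    String[] fields = {"haspayload", "hasMaybepayload", "nopayload", "hasMaybepayload2"};
    for (String field : fields) {
      boolean payloads = new Random(seed ^ field.hashCode()).nextBoolean();
      System.out.println(field + " -> payloads=" + payloads);
    }
  }
}
```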
@@ -93,7 +93,7 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
     Directory dir = newDirectory();
     CodecProvider provider = new MockCodecProvider();
     IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodecProvider(
         provider);
     IndexWriter writer = newWriter(dir, iwconf);
     addDocs(writer, 10);
@@ -121,7 +121,7 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
       System.out.println("TEST: make new index");
     }
     IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer()).setOpenMode(OpenMode.CREATE).setCodecProvider(provider);
+        new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE).setCodecProvider(provider);
     iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
     IndexWriter writer = newWriter(dir, iwconf);
@@ -141,7 +141,7 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
     assertCodecPerField(_TestUtil.checkIndex(dir, provider), "content",
         provider.lookup("MockSep"));

-    iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer())
+    iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND).setCodecProvider(provider);
     //((LogMergePolicy) iwconf.getMergePolicy()).setUseCompoundFile(false);
     //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
@@ -288,7 +288,7 @@ public class TestPerFieldCodecSupport extends LuceneTestCase {
       provider.setFieldCodec("" + j, codecs[random.nextInt(codecs.length)].name);
     }
     IndexWriterConfig config = newIndexWriterConfig(random,
-        TEST_VERSION_CURRENT, new MockAnalyzer());
+        TEST_VERSION_CURRENT, new MockAnalyzer(random));
     config.setOpenMode(OpenMode.CREATE_OR_APPEND);
     config.setCodecProvider(provider);
     IndexWriter writer = newWriter(dir, config);

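The `@@ -288` hunk above passes `random` twice: once to the `newIndexWriterConfig(random, ...)` helper overload that already existed, and now also to the analyzer. A sketch of that helper pattern follows; the `Config` class, method names, and the single randomized setting are assumptions for illustration, while the real helpers in LuceneTestCase randomize many more writer settings.

```java
import java.util.Random;

final class ConfigHelperSketch {
  static class Config { int maxBufferedDocs = 16; }

  // Overload used by most call sites: draws from the test's shared Random.
  static Config newConfigSketch(Random shared) {
    return newConfigSketch(shared, "CURRENT" /* stand-in for TEST_VERSION_CURRENT */);
  }

  // Explicit-Random overload, mirroring newIndexWriterConfig(random, TEST_VERSION_CURRENT, ...):
  // lets a test supply an independent or derived random stream.
  static Config newConfigSketch(Random random, String version) {
    Config c = new Config();
    c.maxBufferedDocs = 2 + random.nextInt(30); // randomize a setting, reproducibly
    return c;
  }

  public static void main(String[] args) {
    System.out.println(newConfigSketch(new Random(7)).maxBufferedDocs);
  }
}
```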
@@ -39,7 +39,7 @@ public class TestPerSegmentDeletes extends LuceneTestCase {
     //IndexWriter.debug2 = System.out;
     Directory dir = new MockDirectoryWrapper(new Random(random.nextLong()), new RAMDirectory());
     IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_CURRENT,
-        new MockAnalyzer());
+        new MockAnalyzer(random));
     iwc.setMergeScheduler(new SerialMergeScheduler());
     iwc.setMaxBufferedDocs(5000);
     iwc.setRAMBufferSizeMB(100);

@@ -38,7 +38,7 @@ public class TestRollback extends LuceneTestCase {
     rw.close();

     // If buffer size is small enough to cause a flush, errors ensue...
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer()).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));

     Term pkTerm = new Term("pk", "");
     for (int i = 0; i < 3; i++) {

@@ -34,7 +34,7 @@ public class TestRollingUpdates extends LuceneTestCase {

     final LineFileDocs docs = new LineFileDocs(random);

-    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer()));
+    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
     final int SIZE = 200 * RANDOM_MULTIPLIER;
     int id = 0;
     IndexReader r = null;
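Taken together, these hunks make analyzer randomness a function of the test seed, so a failing randomized run can be replayed. A hypothetical harness sketch of that workflow: the `tests.seed` property name follows the convention of Lucene's test runner, but treat the property name and the harness shape as assumptions rather than the project's actual mechanism.

```java
import java.util.Random;

public class ReproduceSketch {
  public static void main(String[] args) {
    // Read the seed a failing run reported; fall back to a fixed value.
    long seed = Long.parseLong(System.getProperty("tests.seed", "42"));
    Random random = new Random(seed);
    // Build every randomized component (analyzers, directories, configs)
    // from this one Random: rerunning with the same seed replays the same
    // sequence of decisions, including which fields got payloads.
    System.out.println("first decision: " + random.nextBoolean());
  }
}
```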