mirror of https://github.com/apache/lucene.git

commit 8dbd6e2870 (parent 91ffd60a16)

LUCENE-2248: Change core tests to use a global Version constant

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@908496 13f79535-47bb-0310-9956-ffa450edef68
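Every hunk in this commit applies the same mechanical substitution across the core tests (the first hunk below is the CHANGES.txt entry describing it). Schematically, as a hedged sketch rather than a line taken from the commit — the test class here is hypothetical, and StandardAnalyzer stands in for whatever each real test builds:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;

// Hypothetical test, for illustration only; not part of this commit.
public class VersionConstantPatternTest extends LuceneTestCase {
  public void testPattern() {
    // Before: each test spelled out the version constant itself.
    Analyzer before = new StandardAnalyzer(Version.LUCENE_CURRENT);
    // After: tests use TEST_VERSION_CURRENT, inherited from LuceneTestCase,
    // so a release branch changes one field instead of every test.
    Analyzer after = new StandardAnalyzer(TEST_VERSION_CURRENT);
    assertNotNull(before);
    assertNotNull(after);
  }
}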
@@ -233,6 +233,10 @@ Test Cases
 * LUCENE-2207, LUCENE-2219: Improve BaseTokenStreamTestCase to check if
   end() is implemented correctly.  (Koji Sekiguchi, Robert Muir)
 
+* LUCENE-2248, LUCENE-2251: Refactor tests to not use Version.LUCENE_CURRENT,
+  but instead use a global static value from LuceneTestCase(J4), that
+  contains the release version.  (Uwe Schindler, Simon Willnauer)
+
 ======================= Release 3.0.0 2009-11-25 =======================
 
 Changes in backwards compatibility policy
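The TEST_VERSION_CURRENT constant itself is declared in LuceneTestCase (and its JUnit-4 counterpart LuceneTestCaseJ4, per the entry above), which is not part of this excerpt. A minimal sketch of the assumed declaration — the field name comes from the hunks below, while the modifiers and the concrete version value are assumptions — looks like:

package org.apache.lucene.util;

import junit.framework.TestCase;

public abstract class LuceneTestCase extends TestCase {
  // Assumed declaration: a single release-version constant shared by all
  // core tests, so individual tests never name Version.LUCENE_CURRENT.
  public static final Version TEST_VERSION_CURRENT = Version.LUCENE_31; // assumption: trunk was 3.1 at this commit
}

Because the test classes extend LuceneTestCase (directly, or via BaseTokenStreamTestCase), the unqualified TEST_VERSION_CURRENT in the hunks below resolves through inheritance; that is also why several files can drop their org.apache.lucene.util.Version import entirely.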
@@ -32,7 +32,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 
 /**
  * A very simple demo used in the API documentation (src/java/overview.html).
@@ -44,7 +43,7 @@ public class TestDemo extends LuceneTestCase {
 
   public void testDemo() throws IOException, ParseException {
 
-    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
 
     // Store the index in memory:
     Directory directory = new RAMDirectory();
@@ -62,7 +61,7 @@ public class TestDemo extends LuceneTestCase {
     // Now search the index:
     IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
     // Parse a simple query that searches for "text":
-    QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fieldname", analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fieldname", analyzer);
     Query query = parser.parse("text");
     ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs;
     assertEquals(1, hits.length);
@@ -18,7 +18,6 @@ package org.apache.lucene;
  */
 import java.io.IOException;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
@@ -96,7 +95,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     MyMergeScheduler ms = new MyMergeScheduler();
     writer.setMergeScheduler(ms);
     writer.setMaxBufferedDocs(2);
@@ -22,7 +22,6 @@ import java.io.PrintWriter;
 import java.io.StringWriter;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
 
@@ -74,7 +73,7 @@ public class TestSearch extends LuceneTestCase {
       throws Exception
     {
       Directory directory = new RAMDirectory();
-      Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+      Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
       IndexWriter writer = new IndexWriter(directory, analyzer, true,
                                            IndexWriter.MaxFieldLength.LIMITED);
 
@@ -108,7 +107,7 @@ public class TestSearch extends LuceneTestCase {
       };
       ScoreDoc[] hits = null;
 
-      QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "contents", analyzer);
+      QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "contents", analyzer);
       parser.setPhraseSlop(4);
       for (int j = 0; j < queries.length; j++) {
         Query query = parser.parse(queries[j]);
@@ -27,8 +27,6 @@ import org.apache.lucene.analysis.*;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
 import org.apache.lucene.queryParser.*;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.util.LuceneTestCase;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
@@ -79,7 +77,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
 
   private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
     Directory directory = new RAMDirectory();
-    Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(directory, analyzer, true,
                                          IndexWriter.MaxFieldLength.LIMITED);
 
@@ -98,7 +96,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
     // try a search without OR
     Searcher searcher = new IndexSearcher(directory, true);
 
-    QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, PRIORITY_FIELD, analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, PRIORITY_FIELD, analyzer);
 
     Query query = parser.parse(HIGH_PRIORITY);
     out.println("Query: " + query.toString(PRIORITY_FIELD));
@@ -113,7 +111,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
     searcher = new IndexSearcher(directory, true);
     hits = null;
 
-    parser = new QueryParser(Version.LUCENE_CURRENT, PRIORITY_FIELD, analyzer);
+    parser = new QueryParser(TEST_VERSION_CURRENT, PRIORITY_FIELD, analyzer);
 
     query = parser.parse(HIGH_PRIORITY + " OR " + MED_PRIORITY);
     out.println("Query: " + query.toString(PRIORITY_FIELD));
@@ -67,7 +67,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
     Directory dir = new MockRAMDirectory();
 
     SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
     // Force frequent flushes
     writer.setMaxBufferedDocs(2);
     Document doc = new Document();
@@ -83,7 +83,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
     writer.close();
     copyFiles(dir, cp);
 
-    writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
     copyFiles(dir, cp);
     for(int i=0;i<7;i++) {
       writer.addDocument(doc);
@@ -95,7 +95,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
     writer.close();
     copyFiles(dir, cp);
     dp.release();
-    writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.close();
     try {
       copyFiles(dir, cp);
@@ -111,7 +111,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
     final long stopTime = System.currentTimeMillis() + 1000;
 
     SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
-    final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+    final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
 
     // Force frequent flushes
     writer.setMaxBufferedDocs(2);
@@ -18,8 +18,6 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.util.Version;
-
 import java.io.StringReader;
 import java.util.List;
 import java.util.ArrayList;
@@ -29,7 +27,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
 
   // testLain1Accents() is a copy of TestLatin1AccentFilter.testU().
   public void testLatin1Accents() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader
+    TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader
       ("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ"
       +" Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij"
      +" ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
@@ -1890,7 +1888,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
       expectedOutputTokens.add(expected.toString());
     }
 
-    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(inputText.toString()));
+    TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(inputText.toString()));
     ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     Iterator<String> expectedIter = expectedOutputTokens.iterator();
@@ -26,7 +26,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.index.Payload;
-import org.apache.lucene.util.Version;
 
 public class TestAnalyzers extends BaseTokenStreamTestCase {
 
@@ -35,7 +34,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   }
 
   public void testSimple() throws Exception {
-    Analyzer a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer a = new SimpleAnalyzer(TEST_VERSION_CURRENT);
     assertAnalyzesTo(a, "foo bar FOO BAR",
                      new String[] { "foo", "bar", "foo", "bar" });
     assertAnalyzesTo(a, "foo bar . FOO <> BAR",
@@ -55,7 +54,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   }
 
   public void testNull() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
     assertAnalyzesTo(a, "foo bar FOO BAR",
                      new String[] { "foo", "bar", "FOO", "BAR" });
     assertAnalyzesTo(a, "foo bar . FOO <> BAR",
@@ -75,7 +74,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   }
 
   public void testStop() throws Exception {
-    Analyzer a = new StopAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer a = new StopAnalyzer(TEST_VERSION_CURRENT);
     assertAnalyzesTo(a, "foo bar FOO BAR",
                      new String[] { "foo", "bar", "foo", "bar" });
     assertAnalyzesTo(a, "foo a bar such FOO THESE BAR",
@@ -97,11 +96,11 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   public void testPayloadCopy() throws IOException {
     String s = "how now brown cow";
     TokenStream ts;
-    ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(s));
+    ts = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(s));
     ts = new PayloadSetter(ts);
     verifyPayload(ts);
 
-    ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(s));
+    ts = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(s));
     ts = new PayloadSetter(ts);
     verifyPayload(ts);
   }
@@ -122,12 +121,12 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
 
   private static class MyStandardAnalyzer extends StandardAnalyzer {
     public MyStandardAnalyzer() {
-      super(org.apache.lucene.util.Version.LUCENE_CURRENT);
+      super(TEST_VERSION_CURRENT);
     }
 
     @Override
     public TokenStream tokenStream(String field, Reader reader) {
-      return new WhitespaceAnalyzer(Version.LUCENE_CURRENT).tokenStream(field, reader);
+      return new WhitespaceAnalyzer(TEST_VERSION_CURRENT).tokenStream(field, reader);
     }
   }
 
@@ -144,8 +143,8 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
 
     @Override
     public TokenStream tokenStream(String fieldName, Reader reader) {
-      return new LowerCaseFilter(Version.LUCENE_CURRENT,
-          new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader));
+      return new LowerCaseFilter(TEST_VERSION_CURRENT,
+          new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader));
     }
 
   }
@@ -192,9 +191,9 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   public void testLowerCaseFilterLowSurrogateLeftover() throws IOException {
     // test if the limit of the termbuffer is correctly used with supplementary
     // chars
-    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
+    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT,
         new StringReader("BogustermBogusterm\udc16"));
-    LowerCaseFilter filter = new LowerCaseFilter(Version.LUCENE_CURRENT,
+    LowerCaseFilter filter = new LowerCaseFilter(TEST_VERSION_CURRENT,
         tokenizer);
     assertTokenStreamContents(filter, new String[] {"bogustermbogusterm\udc16"});
     filter.reset();
@@ -31,14 +31,13 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermPositions;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
 
 public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
   private String[] tokens = new String[] {"term1", "term2", "term3", "term2"};
 
   public void testCaching() throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     TokenStream stream = new TokenStream() {
       private int index = 0;
@@ -19,13 +19,12 @@ package org.apache.lucene.analysis;
 
 import java.util.*;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 
 public class TestCharArrayMap extends LuceneTestCase {
   Random r = newRandom();
 
   public void doRandom(int iter, boolean ignoreCase) {
-    CharArrayMap<Integer> map = new CharArrayMap<Integer>(Version.LUCENE_CURRENT, 1, ignoreCase);
+    CharArrayMap<Integer> map = new CharArrayMap<Integer>(TEST_VERSION_CURRENT, 1, ignoreCase);
     HashMap<String,Integer> hmap = new HashMap<String,Integer>();
 
     char[] key;
@@ -63,7 +62,7 @@ public class TestCharArrayMap extends LuceneTestCase {
   }
 
   public void testMethods() {
-    CharArrayMap<Integer> cm = new CharArrayMap<Integer>(Version.LUCENE_CURRENT, 2, false);
+    CharArrayMap<Integer> cm = new CharArrayMap<Integer>(TEST_VERSION_CURRENT, 2, false);
     HashMap<String,Integer> hm = new HashMap<String,Integer>();
     hm.put("foo",1);
     hm.put("bar",2);
@@ -131,7 +130,7 @@ public class TestCharArrayMap extends LuceneTestCase {
   }
 
   public void testModifyOnUnmodifiable(){
-    CharArrayMap<Integer> map = new CharArrayMap<Integer>(Version.LUCENE_CURRENT, 2, false);
+    CharArrayMap<Integer> map = new CharArrayMap<Integer>(TEST_VERSION_CURRENT, 2, false);
     map.put("foo",1);
     map.put("bar",2);
     final int size = map.size();
@@ -228,7 +227,7 @@ public class TestCharArrayMap extends LuceneTestCase {
   }
 
   public void testToString() {
-    CharArrayMap<Integer> cm = new CharArrayMap<Integer>(Version.LUCENE_CURRENT, Collections.singletonMap("test",1), false);
+    CharArrayMap<Integer> cm = new CharArrayMap<Integer>(TEST_VERSION_CURRENT, Collections.singletonMap("test",1), false);
     assertEquals("[test]",cm.keySet().toString());
     assertEquals("[1]",cm.values().toString());
     assertEquals("[test=1]",cm.entrySet().toString());
@@ -41,7 +41,7 @@ public class TestCharArraySet extends LuceneTestCase {
 
 
   public void testRehash() throws Exception {
-    CharArraySet cas = new CharArraySet(Version.LUCENE_CURRENT, 0, true);
+    CharArraySet cas = new CharArraySet(TEST_VERSION_CURRENT, 0, true);
     for(int i=0;i<TEST_STOP_WORDS.length;i++)
       cas.add(TEST_STOP_WORDS[i]);
     assertEquals(TEST_STOP_WORDS.length, cas.size());
@@ -52,7 +52,7 @@ public class TestCharArraySet extends LuceneTestCase {
   public void testNonZeroOffset() {
     String[] words={"Hello","World","this","is","a","test"};
     char[] findme="xthisy".toCharArray();
-    CharArraySet set=new CharArraySet(Version.LUCENE_CURRENT, 10,true);
+    CharArraySet set=new CharArraySet(TEST_VERSION_CURRENT, 10,true);
     set.addAll(Arrays.asList(words));
     assertTrue(set.contains(findme, 1, 4));
     assertTrue(set.contains(new String(findme,1,4)));
@@ -64,7 +64,7 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testObjectContains() {
-    CharArraySet set = new CharArraySet(Version.LUCENE_CURRENT, 10, true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 10, true);
     Integer val = Integer.valueOf(1);
     set.add(val);
     assertTrue(set.contains(val));
@@ -80,7 +80,7 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testClear(){
-    CharArraySet set=new CharArraySet(Version.LUCENE_CURRENT, 10,true);
+    CharArraySet set=new CharArraySet(TEST_VERSION_CURRENT, 10,true);
     set.addAll(Arrays.asList(TEST_STOP_WORDS));
     assertEquals("Not all words added", TEST_STOP_WORDS.length, set.size());
     set.clear();
@@ -94,7 +94,7 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testModifyOnUnmodifiable(){
-    CharArraySet set=new CharArraySet(Version.LUCENE_CURRENT, 10, true);
+    CharArraySet set=new CharArraySet(TEST_VERSION_CURRENT, 10, true);
     set.addAll(Arrays.asList(TEST_STOP_WORDS));
     final int size = set.size();
     set = CharArraySet.unmodifiableSet(set);
@@ -150,7 +150,7 @@ public class TestCharArraySet extends LuceneTestCase {
     // current key (now a char[]) on a Set<String> would not hit any element of the CAS and therefor never call
     // remove() on the iterator
     try{
-      set.removeAll(new CharArraySet(Version.LUCENE_CURRENT, Arrays.asList(TEST_STOP_WORDS), true));
+      set.removeAll(new CharArraySet(TEST_VERSION_CURRENT, Arrays.asList(TEST_STOP_WORDS), true));
       fail("Modified unmodifiable set");
     }catch (UnsupportedOperationException e) {
       // expected
@@ -158,7 +158,7 @@ public class TestCharArraySet extends LuceneTestCase {
     }
 
     try{
-      set.retainAll(new CharArraySet(Version.LUCENE_CURRENT, Arrays.asList(NOT_IN_SET), true));
+      set.retainAll(new CharArraySet(TEST_VERSION_CURRENT, Arrays.asList(NOT_IN_SET), true));
       fail("Modified unmodifiable set");
     }catch (UnsupportedOperationException e) {
       // expected
@@ -179,7 +179,7 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testUnmodifiableSet(){
-    CharArraySet set = new CharArraySet(Version.LUCENE_CURRENT, 10,true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 10,true);
     set.addAll(Arrays.asList(TEST_STOP_WORDS));
     set.add(Integer.valueOf(1));
     final int size = set.size();
@@ -209,7 +209,7 @@ public class TestCharArraySet extends LuceneTestCase {
         "\ud801\udc1c\ud801\udc1cCDE", "A\ud801\udc1cB"};
     String[] lowerArr = new String[] {"abc\ud801\udc44",
         "\ud801\udc44\ud801\udc44cde", "a\ud801\udc44b"};
-    CharArraySet set = new CharArraySet(Version.LUCENE_31, Arrays.asList(TEST_STOP_WORDS), true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, Arrays.asList(TEST_STOP_WORDS), true);
     for (String upper : upperArr) {
       set.add(upper);
     }
@@ -217,7 +217,7 @@ public class TestCharArraySet extends LuceneTestCase {
       assertTrue(String.format(missing, upperArr[i]), set.contains(upperArr[i]));
      assertTrue(String.format(missing, lowerArr[i]), set.contains(lowerArr[i]));
     }
-    set = new CharArraySet(Version.LUCENE_31, Arrays.asList(TEST_STOP_WORDS), false);
+    set = new CharArraySet(TEST_VERSION_CURRENT, Arrays.asList(TEST_STOP_WORDS), false);
     for (String upper : upperArr) {
       set.add(upper);
     }
@@ -235,7 +235,7 @@ public class TestCharArraySet extends LuceneTestCase {
 
     String[] lowerArr = new String[] { "abc\uD800", "abc\uD800efg",
         "\uD800efg", "\uD800\ud801\udc44b" };
-    CharArraySet set = new CharArraySet(Version.LUCENE_31, Arrays
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, Arrays
        .asList(TEST_STOP_WORDS), true);
     for (String upper : upperArr) {
       set.add(upper);
@@ -244,7 +244,7 @@ public class TestCharArraySet extends LuceneTestCase {
       assertTrue(String.format(missing, upperArr[i]), set.contains(upperArr[i]));
       assertTrue(String.format(missing, lowerArr[i]), set.contains(lowerArr[i]));
     }
-    set = new CharArraySet(Version.LUCENE_31, Arrays.asList(TEST_STOP_WORDS),
+    set = new CharArraySet(TEST_VERSION_CURRENT, Arrays.asList(TEST_STOP_WORDS),
        false);
     for (String upper : upperArr) {
       set.add(upper);
@@ -328,8 +328,8 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testCopyCharArraySetBWCompat() {
-    CharArraySet setIngoreCase = new CharArraySet(Version.LUCENE_CURRENT, 10, true);
-    CharArraySet setCaseSensitive = new CharArraySet(Version.LUCENE_CURRENT, 10, false);
+    CharArraySet setIngoreCase = new CharArraySet(TEST_VERSION_CURRENT, 10, true);
+    CharArraySet setCaseSensitive = new CharArraySet(TEST_VERSION_CURRENT, 10, false);
 
     List<String> stopwords = Arrays.asList(TEST_STOP_WORDS);
     List<String> stopwordsUpper = new ArrayList<String>();
@@ -375,8 +375,8 @@ public class TestCharArraySet extends LuceneTestCase {
    * Test the static #copy() function with a CharArraySet as a source
    */
   public void testCopyCharArraySet() {
-    CharArraySet setIngoreCase = new CharArraySet(Version.LUCENE_CURRENT, 10, true);
-    CharArraySet setCaseSensitive = new CharArraySet(Version.LUCENE_CURRENT, 10, false);
+    CharArraySet setIngoreCase = new CharArraySet(TEST_VERSION_CURRENT, 10, true);
+    CharArraySet setCaseSensitive = new CharArraySet(TEST_VERSION_CURRENT, 10, false);
 
     List<String> stopwords = Arrays.asList(TEST_STOP_WORDS);
     List<String> stopwordsUpper = new ArrayList<String>();
@@ -388,8 +388,8 @@ public class TestCharArraySet extends LuceneTestCase {
     setCaseSensitive.addAll(Arrays.asList(TEST_STOP_WORDS));
     setCaseSensitive.add(Integer.valueOf(1));
 
-    CharArraySet copy = CharArraySet.copy(Version.LUCENE_CURRENT, setIngoreCase);
-    CharArraySet copyCaseSens = CharArraySet.copy(Version.LUCENE_CURRENT, setCaseSensitive);
+    CharArraySet copy = CharArraySet.copy(TEST_VERSION_CURRENT, setIngoreCase);
+    CharArraySet copyCaseSens = CharArraySet.copy(TEST_VERSION_CURRENT, setCaseSensitive);
 
     assertEquals(setIngoreCase.size(), copy.size());
     assertEquals(setCaseSensitive.size(), copy.size());
@@ -431,7 +431,7 @@ public class TestCharArraySet extends LuceneTestCase {
     }
     set.addAll(Arrays.asList(TEST_STOP_WORDS));
 
-    CharArraySet copy = CharArraySet.copy(Version.LUCENE_CURRENT, set);
+    CharArraySet copy = CharArraySet.copy(TEST_VERSION_CURRENT, set);
 
     assertEquals(set.size(), copy.size());
     assertEquals(set.size(), copy.size());
@@ -461,7 +461,7 @@ public class TestCharArraySet extends LuceneTestCase {
    */
   public void testCopyEmptySet() {
     assertSame(CharArraySet.EMPTY_SET,
-        CharArraySet.copy(Version.LUCENE_CURRENT, CharArraySet.EMPTY_SET));
+        CharArraySet.copy(TEST_VERSION_CURRENT, CharArraySet.EMPTY_SET));
   }
 
   /**
@@ -483,7 +483,7 @@ public class TestCharArraySet extends LuceneTestCase {
    * Test for NPE
    */
   public void testContainsWithNull() {
-    CharArraySet set = new CharArraySet(Version.LUCENE_CURRENT, 1, true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 1, true);
     try {
       set.contains((char[]) null, 0, 10);
       fail("null value must raise NPE");
@@ -506,7 +506,7 @@ public class TestCharArraySet extends LuceneTestCase {
     assertTrue("in 3.0 version, iterator should be CharArraySetIterator",
         ((Iterator) CharArraySet.copy(Version.LUCENE_30, hset).iterator()) instanceof CharArraySet.CharArraySetIterator);
 
-    CharArraySet set = CharArraySet.copy(Version.LUCENE_CURRENT, hset);
+    CharArraySet set = CharArraySet.copy(TEST_VERSION_CURRENT, hset);
     assertFalse("in current version, iterator should not be CharArraySetIterator",
         ((Iterator) set.iterator()) instanceof CharArraySet.CharArraySetIterator);
 
@@ -525,7 +525,7 @@ public class TestCharArraySet extends LuceneTestCase {
   }
 
   public void testToString() {
-    CharArraySet set = CharArraySet.copy(Version.LUCENE_CURRENT, Collections.singleton("test"));
+    CharArraySet set = CharArraySet.copy(TEST_VERSION_CURRENT, Collections.singleton("test"));
     assertEquals("[test]", set.toString());
     set.add("test2");
     assertTrue(set.toString().contains(", "));
@@ -46,7 +46,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
     // internal buffer size is 1024 make sure we have a surrogate pair right at the border
     builder.insert(1023, "\ud801\udc1c");
     LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(
-        Version.LUCENE_CURRENT, new StringReader(builder.toString()));
+        TEST_VERSION_CURRENT, new StringReader(builder.toString()));
     assertTokenStreamContents(tokenizer, builder.toString().toLowerCase().split(" "));
   }
 
@@ -64,7 +64,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
     }
     builder.append("\ud801\udc1cabc");
     LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(
-        Version.LUCENE_CURRENT, new StringReader(builder.toString()));
+        TEST_VERSION_CURRENT, new StringReader(builder.toString()));
     assertTokenStreamContents(tokenizer, new String[] {builder.toString().toLowerCase()});
     }
   }
@@ -79,7 +79,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
       builder.append("A");
     }
     LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(
-        Version.LUCENE_CURRENT, new StringReader(builder.toString() + builder.toString()));
+        TEST_VERSION_CURRENT, new StringReader(builder.toString() + builder.toString()));
     assertTokenStreamContents(tokenizer, new String[] {builder.toString().toLowerCase(), builder.toString().toLowerCase()});
   }
 
@@ -94,13 +94,13 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
     }
     builder.append("\ud801\udc1c");
     LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(
-        Version.LUCENE_CURRENT, new StringReader(builder.toString() + builder.toString()));
+        TEST_VERSION_CURRENT, new StringReader(builder.toString() + builder.toString()));
     assertTokenStreamContents(tokenizer, new String[] {builder.toString().toLowerCase(), builder.toString().toLowerCase()});
   }
 
   public void testLowerCaseTokenizer() throws IOException {
     StringReader reader = new StringReader("Tokenizer \ud801\udc1ctest");
-    LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(Version.LUCENE_CURRENT,
+    LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(TEST_VERSION_CURRENT,
        reader);
     assertTokenStreamContents(tokenizer, new String[] { "tokenizer",
         "\ud801\udc44test" });
@@ -115,7 +115,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
 
   public void testWhitespaceTokenizer() throws IOException {
     StringReader reader = new StringReader("Tokenizer \ud801\udc1ctest");
-    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
+    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT,
        reader);
     assertTokenStreamContents(tokenizer, new String[] { "Tokenizer",
         "\ud801\udc1ctest" });
@@ -132,7 +132,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
   public void testIsTokenCharCharInSubclass() {
     new TestingCharTokenizer(Version.LUCENE_30, new StringReader(""));
     try {
-      new TestingCharTokenizer(Version.LUCENE_CURRENT, new StringReader(""));
+      new TestingCharTokenizer(TEST_VERSION_CURRENT, new StringReader(""));
       fail("version 3.1 is not permitted if char based method is implemented");
     } catch (IllegalArgumentException e) {
       // expected
@@ -142,7 +142,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
   public void testNormalizeCharInSubclass() {
     new TestingCharTokenizerNormalize(Version.LUCENE_30, new StringReader(""));
     try {
-      new TestingCharTokenizerNormalize(Version.LUCENE_CURRENT,
+      new TestingCharTokenizerNormalize(TEST_VERSION_CURRENT,
          new StringReader(""));
       fail("version 3.1 is not permitted if char based method is implemented");
     } catch (IllegalArgumentException e) {
@@ -154,7 +154,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
     new TestingCharTokenizerNormalizeIsTokenChar(Version.LUCENE_30,
        new StringReader(""));
     try {
-      new TestingCharTokenizerNormalizeIsTokenChar(Version.LUCENE_CURRENT,
+      new TestingCharTokenizerNormalizeIsTokenChar(TEST_VERSION_CURRENT,
         new StringReader(""));
      fail("version 3.1 is not permitted if char based method is implemented");
    } catch (IllegalArgumentException e) {
@@ -18,13 +18,11 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.util.Version;
-
 import java.io.StringReader;
 
 public class TestISOLatin1AccentFilter extends BaseTokenStreamTestCase {
   public void testU() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
+    TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
     ISOLatin1AccentFilter filter = new ISOLatin1AccentFilter(stream);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     assertTermEquals("Des", filter, termAtt);
@@ -31,7 +31,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
 
 public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
 
@@ -43,7 +42,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
     super.setUp();
     directory = new RAMDirectory();
     IndexWriter writer = new IndexWriter(directory,
-        new SimpleAnalyzer(Version.LUCENE_CURRENT),
+        new SimpleAnalyzer(TEST_VERSION_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
@@ -57,10 +56,10 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
   }
 
   public void testPerFieldAnalyzer() throws Exception {
-    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer(Version.LUCENE_CURRENT));
+    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer(TEST_VERSION_CURRENT));
     analyzer.addAnalyzer("partnum", new KeywordAnalyzer());
 
-    QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, "description", analyzer);
+    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, "description", analyzer);
     Query query = queryParser.parse("partnum:Q36 AND SPACE");
 
     ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
@@ -7,7 +7,6 @@ import java.util.Set;
 
 import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.util.Version;
 import org.junit.Test;
 
 /**
@@ -34,21 +33,21 @@ public class TestKeywordMarkerTokenFilter extends BaseTokenStreamTestCase {
 
   @Test
   public void testIncrementToken() throws IOException {
-    CharArraySet set = new CharArraySet(Version.LUCENE_31, 5, true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 5, true);
     set.add("lucenefox");
     String[] output = new String[] { "the", "quick", "brown", "LuceneFox",
        "jumps" };
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(
            "The quIck browN LuceneFox Jumps")), set)), output);
     Set<String> jdkSet = new HashSet<String>();
     jdkSet.add("LuceneFox");
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(
            "The quIck browN LuceneFox Jumps")), jdkSet)), output);
     Set<?> set2 = set;
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(
            "The quIck browN LuceneFox Jumps")), set2)), output);
   }
 
@@ -18,14 +18,12 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.util.Version;
-
 import java.io.StringReader;
 
 public class TestLengthFilter extends BaseTokenStreamTestCase {
 
   public void testFilter() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
+    TokenStream stream = new WhitespaceTokenizer(TEST_VERSION_CURRENT,
        new StringReader("short toolong evenmuchlongertext a ab toolong foo"));
     LengthFilter filter = new LengthFilter(stream, 2, 6);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
@@ -19,8 +19,6 @@ package org.apache.lucene.analysis;
 
 import java.io.StringReader;
 
-import org.apache.lucene.util.Version;
-
 public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 
   NormalizeCharMap normMap;
@@ -60,55 +58,55 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 
   public void testNothingChange() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "x" ) );
-    TokenStream ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer(TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"x"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "h" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"i"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to2() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "j" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"jj"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to3() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "k" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"kkk"}, new int[]{0}, new int[]{1});
   }
 
   public void test2to4() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "ll" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"llll"}, new int[]{0}, new int[]{2});
   }
 
   public void test2to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "aa" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"a"}, new int[]{0}, new int[]{2});
   }
 
   public void test3to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "bbb" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"b"}, new int[]{0}, new int[]{3});
   }
 
   public void test4to2() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "cccc" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"cc"}, new int[]{0}, new int[]{4});
   }
 
   public void test5to0() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "empty" ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts, new String[0]);
   }
 
@@ -132,7 +130,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
   //
   public void testTokenStream() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, CharReader.get( new StringReader( "h i j k ll cccc bbb aa" ) ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts,
      new String[]{"i","i","jj","kkk","llll","cc","b","a"},
      new int[]{0,2,4,6,8,11,16,20},
@@ -153,7 +151,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
   public void testChained() throws Exception {
     CharStream cs = new MappingCharFilter( normMap,
      new MappingCharFilter( normMap, CharReader.get( new StringReader( "aaaa ll h" ) ) ) );
-    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
+    TokenStream ts = new WhitespaceTokenizer( TEST_VERSION_CURRENT, cs );
     assertTokenStreamContents(ts,
new String[]{"a","llllllll","i"},
|
new String[]{"a","llllllll","i"},
|
||||||
new int[]{0,5,8},
|
new int[]{0,5,8},
|
||||||
|
|
|
@@ -3,7 +3,6 @@ package org.apache.lucene.analysis;
 import java.io.StringReader;

 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
-import org.apache.lucene.util.Version;

 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -26,8 +25,8 @@ public class TestPerFieldAnalzyerWrapper extends BaseTokenStreamTestCase {
   public void testPerField() throws Exception {
     String text = "Qwerty";
     PerFieldAnalyzerWrapper analyzer =
-      new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
-    analyzer.addAnalyzer("special", new SimpleAnalyzer(Version.LUCENE_CURRENT));
+      new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+    analyzer.addAnalyzer("special", new SimpleAnalyzer(TEST_VERSION_CURRENT));

     TokenStream tokenStream = analyzer.tokenStream("field",
       new StringReader(text));
@@ -25,8 +25,6 @@ import java.io.InputStreamReader;
 import java.io.StringReader;
 import java.util.zip.ZipFile;

-import org.apache.lucene.util.Version;
-
 /**
  * Test the PorterStemFilter with Martin Porter's test data.
  */
@@ -60,9 +58,9 @@ public class TestPorterStemFilter extends BaseTokenStreamTestCase {
   }

   public void testWithKeywordAttribute() throws IOException {
-    CharArraySet set = new CharArraySet(Version.LUCENE_CURRENT, 1, true);
+    CharArraySet set = new CharArraySet(TEST_VERSION_CURRENT, 1, true);
     set.add("yourselves");
-    Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("yourselves yours"));
+    Tokenizer tokenizer = new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader("yourselves yours"));
     TokenStream filter = new PorterStemFilter(new KeywordMarkerTokenFilter(tokenizer, set));
     assertTokenStreamContents(filter, new String[] {"yourselves", "your"});
   }
@@ -23,16 +23,16 @@ import org.apache.lucene.util.Version;

 public class TestStandardAnalyzer extends BaseTokenStreamTestCase {

-  private Analyzer a = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+  private Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);

   public void testMaxTermLength() throws Exception {
-    StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT);
     sa.setMaxTokenLength(5);
     assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "xy", "z"});
   }

   public void testMaxTermLength2() throws Exception {
-    StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT);
     assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "toolong", "xy", "z"});
     sa.setMaxTokenLength(5);

@@ -96,7 +96,7 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {

   public void testLucene1140() throws Exception {
     try {
-      StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+      StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
       assertAnalyzesTo(analyzer, "www.nutch.org.", new String[]{ "www.nutch.org" }, new String[] { "<HOST>" });
     } catch (NullPointerException e) {
       fail("Should not throw an NPE and it did");
@@ -106,7 +106,7 @@ public class TestStandardAnalyzer extends BaseTokenStreamTestCase {

   public void testDomainNames() throws Exception {
     // Current lucene should not show the bug
-    StandardAnalyzer a2 = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    StandardAnalyzer a2 = new StandardAnalyzer(TEST_VERSION_CURRENT);

     // domain names
     assertAnalyzesTo(a2, "www.nutch.org", new String[]{"www.nutch.org"});
@@ -29,7 +29,7 @@ import java.util.HashSet;

 public class TestStopAnalyzer extends BaseTokenStreamTestCase {

-  private StopAnalyzer stop = new StopAnalyzer(Version.LUCENE_CURRENT);
+  private StopAnalyzer stop = new StopAnalyzer(TEST_VERSION_CURRENT);
   private Set<Object> inValidTokens = new HashSet<Object>();

   public TestStopAnalyzer(String s) {
@@ -82,7 +82,7 @@ public class TestStopAnalyzer extends BaseTokenStreamTestCase {
     stopWordsSet.add("good");
     stopWordsSet.add("test");
     stopWordsSet.add("analyzer");
-    StopAnalyzer newStop = new StopAnalyzer(Version.LUCENE_CURRENT, stopWordsSet);
+    StopAnalyzer newStop = new StopAnalyzer(TEST_VERSION_CURRENT, stopWordsSet);
     StringReader reader = new StringReader("This is a good test of the english stop analyzer with positions");
     int expectedIncr[] = { 1, 1, 1, 3, 1, 1, 1, 2, 1};
     TokenStream stream = newStop.tokenStream("test", reader);
@@ -38,7 +38,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
   public void testExactCase() throws IOException {
     StringReader reader = new StringReader("Now is The Time");
     Set<String> stopWords = new HashSet<String>(Arrays.asList("is", "the", "Time"));
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopWords, false);
+    TokenStream stream = new StopFilter(TEST_VERSION_CURRENT, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopWords, false);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -50,7 +50,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
   public void testIgnoreCase() throws IOException {
     StringReader reader = new StringReader("Now is The Time");
     Set<Object> stopWords = new HashSet<Object>(Arrays.asList( "is", "the", "Time" ));
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopWords, true);
+    TokenStream stream = new StopFilter(TEST_VERSION_CURRENT, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopWords, true);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -60,8 +60,8 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
   public void testStopFilt() throws IOException {
     StringReader reader = new StringReader("Now is The Time");
     String[] stopWords = new String[] { "is", "the", "Time" };
-    Set<Object> stopSet = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords);
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
+    Set<Object> stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, stopWords);
+    TokenStream stream = new StopFilter(TEST_VERSION_CURRENT, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopSet);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -84,14 +84,14 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
     log(sb.toString());
     String stopWords[] = a.toArray(new String[0]);
     for (int i=0; i<a.size(); i++) log("Stop: "+stopWords[i]);
-    Set<Object> stopSet = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords);
+    Set<Object> stopSet = StopFilter.makeStopSet(TEST_VERSION_CURRENT, stopWords);
     // with increments
     StringReader reader = new StringReader(sb.toString());
-    StopFilter stpf = new StopFilter(Version.LUCENE_24, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
+    StopFilter stpf = new StopFilter(Version.LUCENE_24, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopSet);
     doTestStopPositons(stpf,true);
     // without increments
     reader = new StringReader(sb.toString());
-    stpf = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
+    stpf = new StopFilter(TEST_VERSION_CURRENT, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopSet);
     doTestStopPositons(stpf,false);
     // with increments, concatenating two stop filters
     ArrayList<String> a0 = new ArrayList<String>();
@@ -107,12 +107,12 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
     for (int i=0; i<a0.size(); i++) log("Stop0: "+stopWords0[i]);
     String stopWords1[] = a1.toArray(new String[0]);
     for (int i=0; i<a1.size(); i++) log("Stop1: "+stopWords1[i]);
-    Set<Object> stopSet0 = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords0);
-    Set<Object> stopSet1 = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords1);
+    Set<Object> stopSet0 = StopFilter.makeStopSet(TEST_VERSION_CURRENT, stopWords0);
+    Set<Object> stopSet1 = StopFilter.makeStopSet(TEST_VERSION_CURRENT, stopWords1);
     reader = new StringReader(sb.toString());
-    StopFilter stpf0 = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet0); // first part of the set
+    StopFilter stpf0 = new StopFilter(TEST_VERSION_CURRENT, new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader), stopSet0); // first part of the set
     stpf0.setEnablePositionIncrements(true);
-    StopFilter stpf01 = new StopFilter(Version.LUCENE_CURRENT, stpf0, stopSet1); // two stop filters concatenated!
+    StopFilter stpf01 = new StopFilter(TEST_VERSION_CURRENT, stpf0, stopSet1); // two stop filters concatenated!
     doTestStopPositons(stpf01,true);
   }

@@ -22,8 +22,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.English;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.io.StringReader;

@@ -76,7 +74,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {


   public void testGeneral() throws IOException {
-    final TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer1.toString())));
+    final TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer1.toString())));
     final TokenStream sink1 = source.newSinkTokenStream();
     final TokenStream sink2 = source.newSinkTokenStream(theFilter);

@@ -90,7 +88,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
   }

   public void testMultipleSources() throws Exception {
-    final TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer1.toString())));
+    final TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer1.toString())));
     final TeeSinkTokenFilter.SinkTokenStream dogDetector = tee1.newSinkTokenStream(dogFilter);
     final TeeSinkTokenFilter.SinkTokenStream theDetector = tee1.newSinkTokenStream(theFilter);
     final TokenStream source1 = new CachingTokenFilter(tee1);
@@ -99,7 +97,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
     dogDetector.addAttribute(CheckClearAttributesAttribute.class);
     theDetector.addAttribute(CheckClearAttributesAttribute.class);

-    final TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer2.toString())));
+    final TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer2.toString())));
     tee2.addSinkTokenStream(dogDetector);
     tee2.addSinkTokenStream(theDetector);
     final TokenStream source2 = tee2;
@@ -111,7 +109,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
     assertTokenStreamContents(dogDetector, new String[]{"Dogs", "Dogs"});

     source1.reset();
-    TokenStream lowerCasing = new LowerCaseFilter(Version.LUCENE_CURRENT, source1);
+    TokenStream lowerCasing = new LowerCaseFilter(TEST_VERSION_CURRENT, source1);
     String[] lowerCaseTokens = new String[tokens1.length];
     for (int i = 0; i < tokens1.length; i++)
       lowerCaseTokens[i] = tokens1[i].toLowerCase();
@@ -133,10 +131,10 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
       buffer.append(English.intToEnglish(i).toUpperCase()).append(' ');
     }
     //make sure we produce the same tokens
-    TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))));
+    TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))));
     TokenStream sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(100));
     teeStream.consumeAllTokens();
-    TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), 100);
+    TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), 100);
     TermAttribute tfTok = stream.addAttribute(TermAttribute.class);
     TermAttribute sinkTok = sink.addAttribute(TermAttribute.class);
     for (int i=0; stream.incrementToken(); i++) {
@@ -149,12 +147,12 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
       int tfPos = 0;
       long start = System.currentTimeMillis();
       for (int i = 0; i < 20; i++) {
-        stream = new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString())));
+        stream = new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString())));
         PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
         while (stream.incrementToken()) {
           tfPos += posIncrAtt.getPositionIncrement();
         }
-        stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), modCounts[j]);
+        stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), modCounts[j]);
         posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
         while (stream.incrementToken()) {
           tfPos += posIncrAtt.getPositionIncrement();
@@ -166,7 +164,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
       //simulate one field with one sink
       start = System.currentTimeMillis();
       for (int i = 0; i < 20; i++) {
-        teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))));
+        teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))));
         sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(modCounts[j]));
         PositionIncrementAttribute posIncrAtt = teeStream.getAttribute(PositionIncrementAttribute.class);
         while (teeStream.incrementToken()) {
@@ -18,7 +18,6 @@ package org.apache.lucene.collation;
  */


-import junit.framework.TestCase;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.PerFieldAnalyzerWrapper;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
@@ -38,14 +37,14 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.util.IndexableBinaryStringTools;
-import org.apache.lucene.util.Version;
+import org.apache.lucene.util.LuceneTestCase;

 import java.io.IOException;
 import java.nio.CharBuffer;
 import java.nio.ByteBuffer;


-public class CollationTestBase extends TestCase {
+public class CollationTestBase extends LuceneTestCase {

   protected String firstRangeBeginningOriginal = "\u062F";
   protected String firstRangeEndOriginal = "\u0698";
@@ -179,7 +178,7 @@ public class CollationTestBase extends TestCase {
     String usResult) throws Exception {
     RAMDirectory indexStore = new RAMDirectory();
     PerFieldAnalyzerWrapper analyzer
-      = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+      = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
     analyzer.addAnalyzer("US", usAnalyzer);
     analyzer.addAnalyzer("France", franceAnalyzer);
     analyzer.addAnalyzer("Sweden", swedenAnalyzer);
@@ -59,7 +59,7 @@ public class TestBinaryDocument extends LuceneTestCase

     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();

@@ -97,7 +97,7 @@ public class TestBinaryDocument extends LuceneTestCase

     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();

@@ -154,7 +154,7 @@ public class TestDocument extends LuceneTestCase
   public void testGetValuesForIndexedDocument() throws Exception
   {
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(makeDocumentWithFields());
     writer.close();

@@ -225,7 +225,7 @@ public class TestDocument extends LuceneTestCase
     doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));

     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     field.setValue("id2");
     writer.addDocument(doc);
@@ -29,7 +29,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Version;
+import static org.apache.lucene.util.LuceneTestCaseJ4.TEST_VERSION_CURRENT;

 class DocHelper {
   public static final String FIELD_1_TEXT = "field one text";
@@ -219,7 +219,7 @@ class DocHelper {
   */
   public static SegmentInfo writeDoc(Directory dir, Document doc) throws IOException
   {
-    return writeDoc(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), Similarity.getDefault(), doc);
+    return writeDoc(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), Similarity.getDefault(), doc);
   }

   /**
@@ -20,8 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -429,7 +427,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {

   private IndexWriter newWriter(Directory dir, boolean create)
       throws IOException {
-    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
+    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     return writer;
   }
@@ -503,7 +501,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   public void testHangOnClose() throws IOException {

     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
     writer.setMaxBufferedDocs(5);
     writer.setUseCompoundFile(false);
@@ -529,7 +527,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     writer.close();

     Directory dir2 = new MockRAMDirectory();
-    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
     lmp.setMinMergeMB(0.0001);
     writer.setMergePolicy(lmp);
@@ -26,7 +26,7 @@ import java.io.File;
 import java.io.IOException;

 public class TestAtomicUpdate extends LuceneTestCase {
-  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
   private Random RANDOM;

   public class MockIndexWriter extends IndexWriter {
@@ -45,7 +45,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.ReaderUtil;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 /*
@@ -218,7 +217,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
       hasTested29++;
     }

-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     w.optimize();
     w.close();

@@ -273,7 +272,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
   }

   public void searchIndex(String dirName, String oldName) throws IOException {
-    //QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+    //QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
     //Query query = parser.parse("handle:1");

     dirName = fullDir(dirName);
@@ -358,7 +357,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     Directory dir = FSDirectory.open(new File(dirName));

     // open writer
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);

     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -402,7 +401,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     searcher.close();

     // optimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();

@@ -452,7 +451,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     searcher.close();

     // optimize
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();

@@ -474,7 +473,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     dirName = fullDir(dirName);

     Directory dir = FSDirectory.open(new File(dirName));
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(doCFS);
     writer.setMaxBufferedDocs(10);

@@ -485,7 +484,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     writer.close();

     // open fresh writer so we get no prx file in the added segment
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(doCFS);
     writer.setMaxBufferedDocs(10);
     addNoProxDoc(writer);
@@ -512,7 +511,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     try {
       Directory dir = FSDirectory.open(new File(fullDir(outputDir)));

-      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setRAMBufferSizeMB(16.0);
       for(int i=0;i<35;i++) {
         addDoc(writer, i);
@@ -24,7 +24,6 @@ import java.util.List;
 import java.util.ArrayList;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -35,7 +34,7 @@ public class TestCheckIndex extends LuceneTestCase {

   public void testDeletedDocs() throws IOException {
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
       IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     Document doc = new Document();
@@ -25,13 +25,11 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;

 public class TestConcurrentMergeScheduler extends LuceneTestCase {

-  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);

   private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
     boolean doFail;
@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.NoLockFactory;
@@ -36,7 +35,7 @@ public class TestCrash extends LuceneTestCase {
   private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
     dir.setLockFactory(NoLockFactory.getNoLockFactory());

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
     //writer.setMaxBufferedDocs(2);
     writer.setMaxBufferedDocs(10);
     ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
@@ -34,7 +34,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /*
   Verify we can read the pre-2.1 file format, do searches
@@ -202,7 +201,7 @@ public class TestDeletionPolicy extends LuceneTestCase

     Directory dir = new RAMDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();

@@ -211,7 +210,7 @@ public class TestDeletionPolicy extends LuceneTestCase
       // Record last time when writer performed deletes of
       // past commits
       lastDeleteTime = System.currentTimeMillis();
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -272,7 +271,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     Directory dir = new RAMDirectory();
     policy.dir = dir;

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     writer.setMergeScheduler(new SerialMergeScheduler());
@@ -281,7 +280,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     }
     writer.close();

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
@@ -319,7 +318,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     // Open & close a writer and assert that it
     // actually removed something:
     int preCount = dir.listAll().length;
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.close();
     int postCount = dir.listAll().length;
     assertTrue(postCount < preCount);
@@ -341,7 +340,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     Directory dir = new MockRAMDirectory();
     policy.dir = dir;

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     for(int i=0;i<10;i++) {
       addDoc(writer);
@@ -360,7 +359,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertTrue(lastCommit != null);

     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -369,7 +368,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertEquals(7, IndexReader.listCommits(dir).size());

     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());

     // Should undo our rollback:
@@ -381,7 +380,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertEquals(11, r.numDocs());
     r.close();

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
     writer.close();
@@ -397,7 +396,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     r.close();

     // Reoptimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();

@@ -408,7 +407,7 @@ public class TestDeletionPolicy extends LuceneTestCase

     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());

     // Reader still sees optimized index, because writer
@@ -444,7 +443,7 @@ public class TestDeletionPolicy extends LuceneTestCase

     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     for(int i=0;i<107;i++) {
@@ -452,7 +451,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     }
     writer.close();

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
@@ -487,7 +486,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);

     for(int j=0;j<N+1;j++) {
-      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setMaxBufferedDocs(10);
       writer.setUseCompoundFile(useCompoundFile);
       for(int i=0;i<17;i++) {
@@ -542,14 +541,14 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);

     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();
     Term searchTerm = new Term("content", "aaa");
     Query query = new TermQuery(searchTerm);

     for(int i=0;i<N+1;i++) {
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -566,7 +565,7 @@ public class TestDeletionPolicy extends LuceneTestCase
       reader.close();
       searcher.close();
     }
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     // this is a commit
@@ -637,7 +636,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);

     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();
@@ -646,7 +645,7 @@ public class TestDeletionPolicy extends LuceneTestCase

     for(int i=0;i<N+1;i++) {

-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setMaxBufferedDocs(10);
|
||||||
writer.setUseCompoundFile(useCompoundFile);
|
writer.setUseCompoundFile(useCompoundFile);
|
||||||
for(int j=0;j<17;j++) {
|
for(int j=0;j<17;j++) {
|
||||||
|
@ -664,7 +663,7 @@ public class TestDeletionPolicy extends LuceneTestCase
|
||||||
reader.close();
|
reader.close();
|
||||||
searcher.close();
|
searcher.close();
|
||||||
|
|
||||||
writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
|
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||||
// This will not commit: there are no changes
|
// This will not commit: there are no changes
|
||||||
// pending because we opened for "create":
|
// pending because we opened for "create":
|
||||||
writer.close();
|
writer.close();
|
||||||
|
|
|
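Every hunk in this change swaps a per-call Version.LUCENE_CURRENT for a constant the tests inherit from their shared base class. As a rough sketch of the shape of that constant — the concrete Version value and the surrounding class body here are assumptions for illustration, not the committed source of LuceneTestCase:

    package org.apache.lucene.util;

    import junit.framework.TestCase;

    // Hypothetical skeleton: one place to state which release version the
    // whole test suite should exercise, instead of Version.LUCENE_CURRENT
    // scattered through every test.
    public abstract class LuceneTestCase extends TestCase {
      // Illustrative value only; the real field holds the current release version.
      public static final Version TEST_VERSION_CURRENT = Version.LUCENE_31;
    }

Because the tests extend LuceneTestCase (or its JUnit 4 counterpart), the constant is visible unqualified, which is why the replacement lines in these hunks can say TEST_VERSION_CURRENT with no import — and why the hunks below also drop the now-unused import of org.apache.lucene.util.Version.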
@@ -194,7 +194,7 @@ public class TestDirectoryReader extends LuceneTestCase {
 }
 
 private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
-IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
 iw.addDocument(doc);
@@ -35,7 +35,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 
 
 /** JUnit adaptation of an older test case DocTest. */
@@ -110,7 +109,7 @@ public class TestDoc extends LuceneTestCase {
 PrintWriter out = new PrintWriter(sw, true);
 
 Directory directory = FSDirectory.open(indexDir);
-IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
 SegmentInfo si1 = indexDoc(writer, "test.txt");
 printSegment(out, si1);
@@ -138,7 +137,7 @@ public class TestDoc extends LuceneTestCase {
 out = new PrintWriter(sw, true);
 
 directory = FSDirectory.open(indexDir);
-writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
 si1 = indexDoc(writer, "test.txt");
 printSegment(out, si1);
@@ -39,7 +39,6 @@ import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 public class TestDocumentWriter extends LuceneTestCase {
@@ -62,7 +61,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 public void testAddDocument() throws Exception {
 Document testDoc = new Document();
 DocHelper.setupDoc(testDoc);
-Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
 IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(testDoc);
 writer.commit();
@@ -111,7 +110,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 Analyzer analyzer = new Analyzer() {
 @Override
 public TokenStream tokenStream(String fieldName, Reader reader) {
-return new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader);
+return new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
 }
 
 @Override
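The hunk above edits a single line inside an anonymous Analyzer. For readers without the rest of TestDocumentWriter in front of them, a self-contained sketch of that pattern; the class and method names are invented for the example, while the 3.x tokenStream(String, Reader) signature comes straight from the hunk:

    import java.io.Reader;
    import java.io.StringReader;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.util.LuceneTestCase;

    // Hypothetical test showing the per-test anonymous Analyzer idiom the
    // hunk edits: the tokenizer now takes the suite-wide version constant.
    public class AnonymousAnalyzerSketch extends LuceneTestCase {
      public void testTokenStream() throws Exception {
        Analyzer analyzer = new Analyzer() {
          @Override
          public TokenStream tokenStream(String fieldName, Reader reader) {
            // TEST_VERSION_CURRENT is inherited from LuceneTestCase
            return new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
          }
        };
        TokenStream ts = analyzer.tokenStream("field", new StringReader("a b c"));
        assertNotNull(ts);
      }
    }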
@@ -144,7 +143,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 Analyzer analyzer = new Analyzer() {
 @Override
 public TokenStream tokenStream(String fieldName, Reader reader) {
-return new TokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader)) {
+return new TokenFilter(new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader)) {
 boolean first=true;
 AttributeSource.State state;
 
@@ -208,7 +207,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 
 
 public void testPreAnalyzedField() throws IOException {
-IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 
 doc.add(new Field("preanalyzed", new TokenStream() {
@@ -267,7 +266,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
 doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
 
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(doc);
 writer.close();
 
@@ -300,7 +299,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 doc.add(f);
 doc.add(new Field("f2", "v2", Store.YES, Index.NO));
 
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(doc);
 writer.optimize(); // be sure to have a single segment
 writer.close();
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
 */
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.store.FSDirectory;
@@ -51,7 +50,7 @@ public class TestFieldsReader extends LuceneTestCase {
 fieldInfos = new FieldInfos();
 DocHelper.setupDoc(testDoc);
 fieldInfos.add(testDoc);
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 writer.addDocument(testDoc);
 writer.close();
@@ -212,7 +211,7 @@ public class TestFieldsReader extends LuceneTestCase {
 FSDirectory tmpDir = FSDirectory.open(file);
 assertTrue(tmpDir != null);
 
-IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 writer.addDocument(testDoc);
 writer.close();
@@ -393,7 +392,7 @@ public class TestFieldsReader extends LuceneTestCase {
 
 try {
 Directory dir = new FaultyFSDirectory(indexDir);
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for(int i=0;i<2;i++)
 writer.addDocument(testDoc);
 writer.optimize();
@@ -19,8 +19,6 @@ package org.apache.lucene.index;
 
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
 
@@ -99,7 +97,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
 */
 public void testFilterIndexReader() throws Exception {
 RAMDirectory directory = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
 IndexWriter.MaxFieldLength.LIMITED);
 
 Document d1 = new Document();
@@ -18,8 +18,6 @@ package org.apache.lucene.index;
 */
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
@@ -41,7 +39,7 @@ public class TestIndexFileDeleter extends LuceneTestCase
 
 Directory dir = new RAMDirectory();
 
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 int i;
 for(i=0;i<35;i++) {
@@ -146,7 +144,7 @@ public class TestIndexFileDeleter extends LuceneTestCase
 
 // Open & close a writer: it should delete the above 4
 // files and nothing more:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();
 
 String[] files2 = dir.listAll();
@@ -54,7 +54,6 @@ import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.NoSuchDirectoryException;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 public class TestIndexReader extends LuceneTestCase
@@ -79,7 +78,7 @@ public class TestIndexReader extends LuceneTestCase
 commitUserData.put("foo", "fighters");
 
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 for(int i=0;i<27;i++)
 addDocumentWithFields(writer);
@@ -101,7 +100,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTrue(c.equals(r.getIndexCommit()));
 
 // Change the index
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 for(int i=0;i<7;i++)
 addDocumentWithFields(writer);
@@ -112,7 +111,7 @@ public class TestIndexReader extends LuceneTestCase
 assertFalse(r2.getIndexCommit().isOptimized());
 r3.close();
 
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
 
@@ -126,19 +125,19 @@ public class TestIndexReader extends LuceneTestCase
 public void testIsCurrent() throws Exception
 {
 RAMDirectory d = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 // set up reader:
 IndexReader reader = IndexReader.open(d, false);
 assertTrue(reader.isCurrent());
 // modify index by adding another document:
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());
 // re-create index:
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());
@@ -154,7 +153,7 @@ public class TestIndexReader extends LuceneTestCase
 {
 RAMDirectory d = new MockRAMDirectory();
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 // set up reader
@@ -166,7 +165,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTrue(fieldNames.contains("unstored"));
 reader.close();
 // add more documents
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 // want to get some more segments here
 for (int i = 0; i < 5*writer.getMergeFactor(); i++)
 {
@@ -246,7 +245,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testTermVectors() throws Exception {
 RAMDirectory d = new MockRAMDirectory();
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 // want to get some more segments here
 // new termvector fields
 for (int i = 0; i < 5 * writer.getMergeFactor(); i++) {
@@ -314,7 +313,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 
 // add 100 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -356,7 +355,7 @@ public class TestIndexReader extends LuceneTestCase
 Directory dir = new RAMDirectory();
 byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
 
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 
 for (int i = 0; i < 10; i++) {
 addDoc(writer, "document number " + (i + 1));
@@ -365,7 +364,7 @@ public class TestIndexReader extends LuceneTestCase
 addDocumentWithTermVectorFields(writer);
 }
 writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("bin1", bin, Field.Store.YES));
 doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -402,7 +401,7 @@ public class TestIndexReader extends LuceneTestCase
 // force optimize
 
 
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
 reader = IndexReader.open(dir, false);
@@ -431,7 +430,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 
 // add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 11; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -476,7 +475,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 
 // add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 11; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -525,7 +524,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 
 // add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, searchTerm.text());
 writer.close();
 
@@ -570,7 +569,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 
 // add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 addDoc(writer, searchTerm.text());
 writer.close();
@@ -624,7 +623,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm2 = new Term("content", "bbb");
 
 // add 100 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -640,7 +639,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTermDocsCount("first reader", reader, searchTerm2, 0);
 
 // add 100 documents with term : bbb
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm2.text());
@@ -707,7 +706,7 @@ public class TestIndexReader extends LuceneTestCase
 // Create initial data set
 File dirFile = new File(System.getProperty("tempDir"), "testIndex");
 Directory dir = getDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "test");
 writer.close();
 dir.close();
@@ -717,7 +716,7 @@ public class TestIndexReader extends LuceneTestCase
 dir = getDirectory();
 
 // Now create the data set again, just as before
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "test");
 writer.close();
 dir.close();
@@ -743,7 +742,7 @@ public class TestIndexReader extends LuceneTestCase
 else
 dir = getDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
 writer.close();
@@ -760,7 +759,7 @@ public class TestIndexReader extends LuceneTestCase
 // incremented:
 Thread.sleep(1000);
 
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir, false);
@@ -777,7 +776,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testVersion() throws IOException {
 Directory dir = new MockRAMDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
 writer.close();
@@ -788,7 +787,7 @@ public class TestIndexReader extends LuceneTestCase
 reader.close();
 // modify index and check version has been
 // incremented:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir, false);
@@ -799,10 +798,10 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testLock() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 IndexReader reader = IndexReader.open(dir, false);
 try {
 reader.deleteDocument(0);
@@ -819,7 +818,7 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testUndeleteAll() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -836,7 +835,7 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testUndeleteAllAfterClose() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -853,7 +852,7 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testUndeleteAllAfterCloseThenReopen() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -891,7 +890,7 @@ public class TestIndexReader extends LuceneTestCase
 
 // First build up a starting index:
 RAMDirectory startDir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for(int i=0;i<157;i++) {
 Document d = new Document();
 d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -1081,7 +1080,7 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testDocsOutOfOrderJIRA140() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for(int i=0;i<11;i++) {
 addDoc(writer, "aaa");
 }
@@ -1099,7 +1098,7 @@ public class TestIndexReader extends LuceneTestCase
 }
 reader.close();
 
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 
 // We must add more docs to get a new segment written
 for(int i=0;i<11;i++) {
@@ -1121,7 +1120,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testExceptionReleaseWriteLockJIRA768() throws IOException {
 
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "aaa");
 writer.close();
 
@@ -1197,7 +1196,7 @@ public class TestIndexReader extends LuceneTestCase
 // add 100 documents with term : aaa
 // add 100 documents with term : bbb
 // add 100 documents with term : ccc
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm1.text());
@@ -1421,7 +1420,7 @@ public class TestIndexReader extends LuceneTestCase
 RAMDirectory d = new MockRAMDirectory();
 
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 for(int i=0;i<27;i++)
 addDocumentWithFields(writer);
@@ -1437,7 +1436,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTrue(c.equals(r.getIndexCommit()));
 
 // Change the index
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 for(int i=0;i<7;i++)
 addDocumentWithFields(writer);
@@ -1448,7 +1447,7 @@ public class TestIndexReader extends LuceneTestCase
 assertFalse(r2.getIndexCommit().isOptimized());
 r2.close();
 
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
 
@@ -1462,7 +1461,7 @@ public class TestIndexReader extends LuceneTestCase
 
 public void testReadOnly() throws Throwable {
 RAMDirectory d = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.commit();
 addDocumentWithFields(writer);
@@ -1476,7 +1475,7 @@ public class TestIndexReader extends LuceneTestCase
 // expected
 }
 
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 
@@ -1493,7 +1492,7 @@ public class TestIndexReader extends LuceneTestCase
 // expected
 }
 
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
 
@@ -1511,7 +1510,7 @@ public class TestIndexReader extends LuceneTestCase
 }
 
 // Make sure write lock isn't held
-writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();
 
 r3.close();
@@ -1521,7 +1520,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-1474
 public void testIndexReader() throws Exception {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
 IndexWriter.MaxFieldLength.UNLIMITED);
 writer.addDocument(createDocument("a"));
 writer.addDocument(createDocument("b"));
@@ -1539,7 +1538,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testIndexReaderUnDeleteAll() throws Exception {
 MockRAMDirectory dir = new MockRAMDirectory();
 dir.setPreventDoubleWrite(false);
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
 IndexWriter.MaxFieldLength.UNLIMITED);
 writer.addDocument(createDocument("a"));
 writer.addDocument(createDocument("b"));
@@ -1581,7 +1580,7 @@ public class TestIndexReader extends LuceneTestCase
 
 Directory dir = new MockRAMDirectory();
 
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 
 writer.setMaxBufferedDocs(2);
@@ -1607,7 +1606,7 @@ public class TestIndexReader extends LuceneTestCase
 // reuse the doc values arrays in FieldCache
 public void testFieldCacheReuseAfterClone() throws Exception {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
 writer.addDocument(doc);
@@ -1638,7 +1637,7 @@ public class TestIndexReader extends LuceneTestCase
 // FieldCache
 public void testFieldCacheReuseAfterReopen() throws Exception {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
 writer.addDocument(doc);
@@ -1670,7 +1669,7 @@ public class TestIndexReader extends LuceneTestCase
 // reopen switches readOnly
 public void testReopenChangeReadonly() throws Exception {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
 writer.addDocument(doc);
@@ -1711,7 +1710,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-1586: getUniqueTermCount
 public void testUniqueTermCount() throws Exception {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
 doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1744,7 +1743,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-1609: don't load terms index
 public void testNoTermsIndex() throws Throwable {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
 doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1762,7 +1761,7 @@ public class TestIndexReader extends LuceneTestCase
 assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
 
 assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 writer.addDocument(doc);
 writer.close();
 
@@ -1781,7 +1780,7 @@ public class TestIndexReader extends LuceneTestCase
 // LUCENE-2046
 public void testPrepareCommitIsCurrent() throws Throwable {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 writer.addDocument(doc);
 IndexReader r = IndexReader.open(dir, true);
@@ -26,7 +26,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /**
  * Tests cloning multiple types of readers, modifying the deletedDocs and norms
@@ -198,7 +197,7 @@ public class TestIndexReaderClone extends LuceneTestCase {

     TestIndexReaderReopen.createIndex(dir1, true);
     IndexReader reader1 = IndexReader.open(dir1, false);
-    IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     w.optimize();
     w.close();
     IndexReader reader2 = reader1.clone(true);
@@ -485,7 +484,7 @@ public class TestIndexReaderClone extends LuceneTestCase {

   public void testCloseStoredFields() throws Exception {
     final Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
     w.setUseCompoundFile(false);
     Document doc = new Document();
     doc.add(new Field("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));

@@ -72,7 +72,7 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     similarityOne = new SimilarityOne();
-    anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT);
   }

   /**

@@ -47,7 +47,6 @@ import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.BitVector;
-import org.apache.lucene.util.Version;

 public class TestIndexReaderReopen extends LuceneTestCase {

@@ -703,7 +702,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
     final Directory dir = new MockRAMDirectory();
     final int n = 30;

-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < n; i++) {
       writer.addDocument(createDocument(i, 3));
     }
@@ -722,7 +721,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
           modifier.deleteDocument(i % modifier.maxDoc());
           modifier.close();
         } else {
-          IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+          IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
           modifier.addDocument(createDocument(n + i, 6));
           modifier.close();
         }
@@ -947,7 +946,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

   public static void createIndex(Directory dir, boolean multiSegment) throws IOException {
     IndexWriter.unlock(dir);
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);

     w.setMergePolicy(new LogDocMergePolicy(w));

@@ -992,7 +991,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
   static void modifyIndex(int i, Directory dir) throws IOException {
     switch (i) {
       case 0: {
-        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
         w.deleteDocuments(new Term("field2", "a11"));
         w.deleteDocuments(new Term("field2", "b30"));
         w.close();
@@ -1007,13 +1006,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
         break;
       }
       case 2: {
-        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
         w.optimize();
         w.close();
         break;
       }
       case 3: {
-        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
         w.addDocument(createDocument(101, 4));
         w.optimize();
         w.addDocument(createDocument(102, 4));
@@ -1029,7 +1028,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
         break;
       }
       case 5: {
-        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+        IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
         w.addDocument(createDocument(101, 4));
         w.close();
         break;
@@ -1193,7 +1192,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

   public void testReopenOnCommit() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
     for(int i=0;i<4;i++) {
       Document doc = new Document();
       doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));

File diff suppressed because it is too large

@@ -29,7 +29,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 public class TestIndexWriterDelete extends LuceneTestCase {

@@ -43,7 +42,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setUseCompoundFile(true);
     modifier.setMaxBufferedDeleteTerms(1);

@@ -80,7 +79,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(2);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -115,7 +114,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testMaxBufferedDeletes() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDeleteTerms(1);
     writer.deleteDocuments(new Term("foobar", "1"));
     writer.deleteDocuments(new Term("foobar", "1"));
@@ -130,7 +129,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     for(int t=0;t<2;t++) {
       Directory dir = new MockRAMDirectory();
       IndexWriter modifier = new IndexWriter(dir,
-          new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+          new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
       modifier.setMaxBufferedDocs(4);
       modifier.setMaxBufferedDeleteTerms(4);

@@ -172,7 +171,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testBothDeletes() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(100);
     modifier.setMaxBufferedDeleteTerms(100);

@@ -205,7 +204,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testBatchDeletes() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(2);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -249,7 +248,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAll() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(2);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -296,7 +295,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAllRollback() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(2);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -334,7 +333,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
   public void testDeleteAllNRT() throws IOException {
     Directory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setMaxBufferedDocs(2);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -426,7 +425,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     // First build up a starting index:
     MockRAMDirectory startDir = new MockRAMDirectory();
     IndexWriter writer = new IndexWriter(startDir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     for (int i = 0; i < 157; i++) {
       Document d = new Document();
       d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@@ -449,7 +448,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
      MockRAMDirectory dir = new MockRAMDirectory(startDir);
      dir.setPreventDoubleWrite(false);
      IndexWriter modifier = new IndexWriter(dir,
-         new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+         new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);

      modifier.setMaxBufferedDocs(1000); // use flush or close
      modifier.setMaxBufferedDeleteTerms(1000); // use flush or close
@@ -655,7 +654,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     MockRAMDirectory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     modifier.setUseCompoundFile(true);
     modifier.setMaxBufferedDeleteTerms(2);

@@ -764,7 +763,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

     MockRAMDirectory dir = new MockRAMDirectory();
     IndexWriter modifier = new IndexWriter(dir,
-        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     dir.failOn(failure.reset());

@@ -21,7 +21,6 @@ import java.util.Random;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
@@ -135,7 +134,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   public void testRandomExceptions() throws Throwable {
     MockRAMDirectory dir = new MockRAMDirectory();

-    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
     writer.setRAMBufferSizeMB(0.1);
@@ -173,7 +172,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
   public void testRandomExceptionsThreads() throws Throwable {

     MockRAMDirectory dir = new MockRAMDirectory();
-    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
     //writer.setMaxBufferedDocs(10);
     writer.setRAMBufferSizeMB(0.2);

@@ -75,10 +75,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
     IndexWriter im;
     FSDirectory dir = FSDirectory.open(this.__test_dir);
     try {
-      im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+      im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     } catch (FileNotFoundException e) {
       try {
-        im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+        im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
       } catch (FileNotFoundException e1) {
       }
     } finally {

@@ -24,7 +24,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 import org.apache.lucene.util.LuceneTestCase;
@@ -35,7 +34,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testNormalCase() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(10);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -52,7 +51,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testNoOverMerge() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(10);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -74,7 +73,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testForceFlush() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(10);
     LogDocMergePolicy mp = new LogDocMergePolicy(writer);
@@ -85,7 +84,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
       addDoc(writer);
       writer.close();

-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
       writer.setMaxBufferedDocs(10);
       writer.setMergePolicy(mp);
       mp.setMinMergeDocs(100);
@@ -100,7 +99,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testMergeFactorChange() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(100);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -126,7 +125,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testMaxBufferedDocsChange() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(101);
     writer.setMergeFactor(101);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -140,7 +139,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     }
     writer.close();

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(101);
     writer.setMergeFactor(101);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -171,7 +170,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
   public void testMergeDocCount0() throws IOException {
     Directory dir = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(100);
@@ -186,7 +185,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     reader.deleteDocuments(new Term("content", "aaa"));
     reader.close();

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(5);

@@ -56,7 +56,7 @@ public class TestIndexWriterMerging extends LuceneTestCase

     Directory merged = new MockRAMDirectory();

-    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergeFactor(2);

     writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
@@ -93,7 +93,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
   private void fillIndex(Directory dir, int start, int numDocs) throws IOException
   {

-    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergeFactor(2);
     writer.setMaxBufferedDocs(2);

@@ -37,7 +37,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.ThreadInterruptedException;

@@ -77,7 +76,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = true;

     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);

     // create the index
@@ -112,7 +111,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     assertEquals(0, count(new Term("id", id10), r3));
     assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));

-    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
@@ -140,7 +139,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = false;

     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     // create the index
@@ -149,7 +148,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
@@ -187,13 +186,13 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = false;

     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);

     // create a 2nd index
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer2.setInfoStream(infoStream);
     createIndexNoClose(!optimize, "index2", writer2);
@@ -222,7 +221,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     boolean optimize = true;

     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     // create the index
@@ -261,7 +260,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.close();

     // reopen the writer to verify the delete made it to the directory
-    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     IndexReader w2r1 = writer.getReader();
@@ -276,7 +275,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     int numDirs = 3;

     Directory mainDir = new MockRAMDirectory();
-    IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     mainWriter.setInfoStream(infoStream);
     AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
@@ -384,7 +383,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
       this.numDirs = numDirs;
       this.mainWriter = mainWriter;
       addDir = new MockRAMDirectory();
-      IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+      IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
          IndexWriter.MaxFieldLength.LIMITED);
       writer.setMaxBufferedDocs(2);
       for (int i = 0; i < NUM_INIT_DOCS; i++) {
@@ -492,7 +491,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
    */
   public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     IndexReader r1 = writer.getReader();
@@ -530,7 +529,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     writer.close();

     // test whether the changes made it to the directory
-    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     IndexReader w2r1 = writer.getReader();
     // insure the deletes were actually flushed to the directory
@@ -571,7 +570,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
    */
   public static void createIndex(Directory dir1, String indexName,
       boolean multiSegment) throws IOException {
-    IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     w.setMergePolicy(new LogDocMergePolicy(w));
     for (int i = 0; i < 100; i++) {
@@ -606,7 +605,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   public void testMergeWarmer() throws Exception {

     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);

@@ -641,7 +640,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testAfterCommit() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);

@@ -674,7 +673,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   // Make sure reader remains usable even if IndexWriter closes
   public void testAfterClose() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);

@@ -704,7 +703,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   // Stress test reopen during addIndexes
   public void testDuringAddIndexes() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     writer.setMergeFactor(2);
@@ -782,7 +781,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
   // Stress test reopen during add/delete
   public void testDuringAddDelete() throws Exception {
     Directory dir1 = new MockRAMDirectory();
-    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setInfoStream(infoStream);
     writer.setMergeFactor(2);
@@ -863,7 +862,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testExpungeDeletes() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
@@ -888,7 +887,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

   public void testDeletesNumDocs() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));

@@ -18,7 +18,6 @@ package org.apache.lucene.index;
  */

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
@@ -64,7 +63,7 @@ public class TestLazyBug extends LuceneTestCase {
     Directory dir = new RAMDirectory();
     try {
       Random r = newRandom();
-      Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+      Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
       IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

       writer.setUseCompoundFile(false);

@@ -30,7 +30,6 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /**
  * Tests lazy skipping on the proximity file.
@@ -61,7 +60,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
     int numDocs = 500;

     Directory directory = new SeekCountingDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(false);
     writer.setMaxBufferedDocs(10);
     for (int i = 0; i < numDocs; i++) {
@@ -119,7 +118,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {

   public void testSeek() throws IOException {
     Directory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < 10; i++) {
       Document doc = new Document();
       doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));

@@ -32,7 +32,6 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /**
  * This testcase tests whether multi-level skipping is being used
@@ -92,7 +91,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
   private static class PayloadAnalyzer extends Analyzer {
     @Override
     public TokenStream tokenStream(String fieldName, Reader reader) {
-      return new PayloadFilter(new LowerCaseTokenizer(Version.LUCENE_CURRENT, reader));
+      return new PayloadFilter(new LowerCaseTokenizer(TEST_VERSION_CURRENT, reader));
     }

   }

@@ -25,7 +25,6 @@ import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 public class TestNRTReaderWithThreads extends LuceneTestCase {
   Random random = new Random();
@@ -33,7 +32,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {

   public void testIndexing() throws Exception {
     Directory mainDir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
         IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(false);
     IndexReader reader = writer.getReader(); // start pooling readers

@@ -65,7 +65,7 @@ public class TestNorms extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     similarityOne = new SimilarityOne();
-    anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT);
   }

   /**

@@ -66,7 +66,7 @@ public class TestOmitTf extends LuceneTestCase {
   // omitTermFreqAndPositions bit in the FieldInfo
   public void testOmitTermFreqAndPositions() throws Exception {
     Directory ram = new MockRAMDirectory();
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document d = new Document();

@@ -112,7 +112,7 @@ public class TestOmitTf extends LuceneTestCase {
   // omitTermFreqAndPositions for the same field works
   public void testMixedMerge() throws Exception {
     Directory ram = new MockRAMDirectory();
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(3);
     writer.setMergeFactor(2);
@@ -165,7 +165,7 @@ public class TestOmitTf extends LuceneTestCase {
   // field,
   public void testMixedRAM() throws Exception {
     Directory ram = new MockRAMDirectory();
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setMergeFactor(2);
@@ -213,7 +213,7 @@ public class TestOmitTf extends LuceneTestCase {
   // Verifies no *.prx exists when all fields omit term freq:
   public void testNoPrxFile() throws Throwable {
     Directory ram = new MockRAMDirectory();
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(3);
     writer.setMergeFactor(2);
@@ -244,7 +244,7 @@ public class TestOmitTf extends LuceneTestCase {
   // Test scores with one field with Term Freqs and one without, otherwise with equal content
   public void testBasic() throws Exception {
     Directory dir = new MockRAMDirectory();
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergeFactor(2);
     writer.setMaxBufferedDocs(2);

@@ -106,7 +106,7 @@ public class TestParallelReader extends LuceneTestCase {
 
     // one document only:
     Directory dir2 = new MockRAMDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document d3 = new Document();
     d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     w2.addDocument(d3);
@@ -151,13 +151,13 @@ public class TestParallelReader extends LuceneTestCase {
     Directory dir2 = getDir2();
 
     // add another document to ensure that the indexes are not optimized
-    IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     Document d = new Document();
     d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     modifier.addDocument(d);
     modifier.close();
 
-    modifier = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     d = new Document();
     d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
     modifier.addDocument(d);
@@ -170,7 +170,7 @@ public class TestParallelReader extends LuceneTestCase {
     assertFalse(pr.isOptimized());
     pr.close();
 
-    modifier = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     modifier.optimize();
     modifier.close();
 
@@ -182,7 +182,7 @@ public class TestParallelReader extends LuceneTestCase {
     pr.close();
 
 
-    modifier = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     modifier.optimize();
     modifier.close();
 
@@ -233,7 +233,7 @@ public class TestParallelReader extends LuceneTestCase {
   // Fields 1-4 indexed together:
   private Searcher single() throws IOException {
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document d1 = new Document();
     d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -263,7 +263,7 @@ public class TestParallelReader extends LuceneTestCase {
 
   private Directory getDir1() throws IOException {
     Directory dir1 = new MockRAMDirectory();
-    IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document d1 = new Document();
     d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -278,7 +278,7 @@ public class TestParallelReader extends LuceneTestCase {
 
   private Directory getDir2() throws IOException {
     Directory dir2 = new RAMDirectory();
-    IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document d3 = new Document();
     d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
     d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 import org.apache.lucene.analysis.SimpleAnalyzer;
@@ -48,7 +47,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
    */
   public void testEmptyIndex() throws IOException {
     RAMDirectory rd1 = new MockRAMDirectory();
-    IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
                                      MaxFieldLength.UNLIMITED);
     iw.close();
 
@@ -56,7 +55,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 
     RAMDirectory rdOut = new MockRAMDirectory();
 
-    IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
                                         MaxFieldLength.UNLIMITED);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
@@ -81,7 +80,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
   public void testEmptyIndexWithVectors() throws IOException {
     RAMDirectory rd1 = new MockRAMDirectory();
     {
-      IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
+      IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
                                        MaxFieldLength.UNLIMITED);
       Document doc = new Document();
       doc.add(new Field("test", "", Store.NO, Index.ANALYZED,
@@ -96,7 +95,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
       ir.deleteDocument(0);
       ir.close();
 
-      iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), false,
+      iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), false,
                            MaxFieldLength.UNLIMITED);
       iw.optimize();
       iw.close();
@@ -104,7 +103,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 
     RAMDirectory rd2 = new MockRAMDirectory();
     {
-      IndexWriter iw = new IndexWriter(rd2, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
+      IndexWriter iw = new IndexWriter(rd2, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
                                        MaxFieldLength.UNLIMITED);
       Document doc = new Document();
       iw.addDocument(doc);
@@ -113,7 +112,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 
     RAMDirectory rdOut = new MockRAMDirectory();
 
-    IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
                                         MaxFieldLength.UNLIMITED);
     ParallelReader pr = new ParallelReader();
     pr.add(IndexReader.open(rd1,true));
@@ -20,8 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -39,7 +37,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
     Document doc;
 
     RAMDirectory rd1 = new RAMDirectory();
-    IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
     doc = new Document();
     doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@@ -51,7 +49,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
 
     iw1.close();
     RAMDirectory rd2 = new RAMDirectory();
-    IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
     doc = new Document();
     doc.add(new Field("field0", "", Store.NO, Index.ANALYZED));
@@ -41,7 +41,6 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnicodeUtil;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 
@@ -396,7 +395,7 @@ public class TestPayloads extends LuceneTestCase {
     @Override
     public TokenStream tokenStream(String fieldName, Reader reader) {
       PayloadData payload = fieldToData.get(fieldName);
-      TokenStream ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader);
+      TokenStream ts = new WhitespaceTokenizer(TEST_VERSION_CURRENT, reader);
       if (payload != null) {
         if (payload.numFieldInstancesToSkip == 0) {
           ts = new PayloadFilter(ts, payload.data, payload.offset, payload.length);
@@ -469,7 +468,7 @@ public class TestPayloads extends LuceneTestCase {
     final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
 
     Directory dir = new RAMDirectory();
-    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     final String field = "test";
 
     Thread[] ingesters = new Thread[numThreads];
@@ -18,7 +18,6 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
@@ -103,7 +102,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
 
   public void testSkipTo(int indexDivisor) throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
 
     Term ta = new Term("content","aaa");
@@ -20,8 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -38,7 +36,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
   {
     IndexWriter writer = null;
 
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
     // ADD 100 documents with term : aaa
     // add 100 documents with terms: aaa bbb
@@ -54,7 +52,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
     verifyDocFreq();
 
     // merge segments by optimizing the index
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();
 
@@ -65,7 +63,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
   public void testPrevTermAtEnd() throws IOException
   {
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer, "aaa bbb");
     writer.close();
     SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);
@@ -26,7 +26,7 @@ import java.util.Random;
 import java.io.File;
 
 public class TestStressIndexing extends LuceneTestCase {
-  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
   private Random RANDOM;
 
   private static abstract class TimedThread extends Thread {
@@ -19,7 +19,6 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.analysis.*;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.search.TermQuery;
@@ -124,7 +123,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
 
   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     w.setUseCompoundFile(false);
 
     /***
@@ -176,7 +175,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
   public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
     for(int iter=0;iter<3;iter++) {
-      IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
       w.setUseCompoundFile(false);
 
       // force many merges
@@ -219,7 +218,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
 
 
   public static void indexSerial(Map<String,Document> docs, Directory dir) throws IOException {
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 
     // index all docs in a single thread
     Iterator<Document> iter = docs.values().iterator();
@@ -24,7 +24,6 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.English;
 
@@ -35,7 +34,7 @@ import java.io.File;
 
 public class TestThreadedOptimize extends LuceneTestCase {
 
-  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
 
   private final static int NUM_THREADS = 3;
   //private final static int NUM_THREADS = 5;
@@ -27,8 +27,6 @@ import java.util.Map;
 import java.util.HashMap;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -67,7 +65,7 @@ public class TestTransactionRollback extends LuceneTestCase {
     if (last==null)
       throw new RuntimeException("Couldn't find commit point "+id);
 
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
                                     new RollbackDeletionPolicy(id), MaxFieldLength.UNLIMITED, last);
     Map<String,String> data = new HashMap<String,String>();
     data.put("index", "Rolled back to 1-"+id);
@@ -129,7 +127,7 @@ public class TestTransactionRollback extends LuceneTestCase {
 
     //Build index, of records 1 to 100, committing after each batch of 10
     IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
-    IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(Version.LUCENE_CURRENT),sdp,MaxFieldLength.UNLIMITED);
+    IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),sdp,MaxFieldLength.UNLIMITED);
     for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
       Document doc=new Document();
       doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));
@@ -197,7 +195,7 @@ public class TestTransactionRollback extends LuceneTestCase {
     for(int i=0;i<2;i++) {
       // Unless you specify a prior commit point, rollback
       // should not work:
-      new IndexWriter(dir,new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+      new IndexWriter(dir,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
                       new DeleteLastCommitPolicy(),
                       MaxFieldLength.UNLIMITED).close();
       IndexReader r = IndexReader.open(dir, true);
@@ -88,12 +88,12 @@ public class TestTransactions extends LuceneTestCase
     @Override
     public void doWork() throws Throwable {
 
-      IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
       writer1.setMaxBufferedDocs(3);
       writer1.setMergeFactor(2);
       ((ConcurrentMergeScheduler) writer1.getMergeScheduler()).setSuppressExceptions();
 
-      IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
       // Intentionally use different params so flush/merge
       // happen @ different times
       writer2.setMaxBufferedDocs(2);
@@ -178,7 +178,7 @@ public class TestTransactions extends LuceneTestCase
     }
 
   public void initIndex(Directory dir) throws Throwable {
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     for(int j=0; j<7; j++) {
       Document d = new Document();
       int n = RANDOM.nextInt();
@@ -44,7 +44,7 @@ public class TestMultiAnalyzer extends BaseTokenStreamTestCase {
 
   public void testMultiAnalyzer() throws ParseException {
 
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "", new MultiAnalyzer());
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "", new MultiAnalyzer());
 
     // trivial, no multiple tokens:
     assertEquals("foo", qp.parse("foo").toString());
@@ -135,9 +135,9 @@ public class TestMultiAnalyzer extends BaseTokenStreamTestCase {
 
     @Override
     public TokenStream tokenStream(String fieldName, Reader reader) {
-      TokenStream result = new StandardTokenizer(Version.LUCENE_CURRENT, reader);
+      TokenStream result = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
       result = new TestFilter(result);
-      result = new LowerCaseFilter(Version.LUCENE_CURRENT, result);
+      result = new LowerCaseFilter(TEST_VERSION_CURRENT, result);
       return result;
     }
   }
@@ -203,9 +203,9 @@ public class TestMultiAnalyzer extends BaseTokenStreamTestCase {
 
     @Override
     public TokenStream tokenStream(String fieldName, Reader reader) {
-      TokenStream result = new StandardTokenizer(Version.LUCENE_CURRENT, reader);
+      TokenStream result = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
       result = new TestPosIncrementFilter(result);
-      result = new LowerCaseFilter(Version.LUCENE_CURRENT, result);
+      result = new LowerCaseFilter(TEST_VERSION_CURRENT, result);
       return result;
     }
   }
@@ -242,7 +242,7 @@ public class TestMultiAnalyzer extends BaseTokenStreamTestCase {
   private final static class DumbQueryParser extends QueryParser {
 
     public DumbQueryParser(String f, Analyzer a) {
-      super(Version.LUCENE_CURRENT, f, a);
+      super(TEST_VERSION_CURRENT, f, a);
     }
 
     /** expose super's version */
@@ -36,7 +36,6 @@ import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 
 /**
  * Tests QueryParser.
@@ -60,18 +59,18 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     String[] fields = {"b", "t"};
     Occur occur[] = {Occur.SHOULD, Occur.SHOULD};
     TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
-    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, a);
+    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a);
 
     Query q = mfqp.parse(qtxt);
     assertEquals(expectedRes, q.toString());
 
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, qtxt, fields, occur, a);
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a);
     assertEquals(expectedRes, q.toString());
   }
 
   public void testSimple() throws Exception {
     String[] fields = {"b", "t"};
-    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
 
     Query q = mfqp.parse("one");
     assertEquals("b:one t:one", q.toString());
@@ -134,7 +133,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     boosts.put("b", Float.valueOf(5));
     boosts.put("t", Float.valueOf(10));
     String[] fields = {"b", "t"};
-    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), boosts);
+    MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT), boosts);
 
 
     //Check for simple
@@ -160,24 +159,24 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
   public void testStaticMethod1() throws ParseException {
     String[] fields = {"b", "t"};
     String[] queries = {"one", "two"};
-    Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("b:one t:two", q.toString());
 
     String[] queries2 = {"+one", "+two"};
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries2, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries2, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("(+b:one) (+t:two)", q.toString());
 
     String[] queries3 = {"one", "+two"};
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries3, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries3, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("b:one (+t:two)", q.toString());
 
     String[] queries4 = {"one +more", "+two"};
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries4, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries4, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("(b:one +b:more) (+t:two)", q.toString());
 
     String[] queries5 = {"blah"};
     try {
-      q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries5, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries5, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -187,11 +186,11 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
 
     String[] queries6 = {"((+stop))", "+((stop))"};
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries6, fields, stopA);
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries6, fields, stopA);
     assertEquals("", q.toString());
 
     String[] queries7 = {"one ((+stop)) +more", "+((stop)) +two"};
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries7, fields, stopA);
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries7, fields, stopA);
     assertEquals("(b:one +b:more) (+t:two)", q.toString());
 
   }
@@ -199,15 +198,15 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
   public void testStaticMethod2() throws ParseException {
     String[] fields = {"b", "t"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-    Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("+b:one -t:one", q.toString());
 
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -219,15 +218,15 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     //int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
 
-    Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));//, fields, flags, new StandardAnalyzer());
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));//, fields, flags, new StandardAnalyzer());
     assertEquals("+b:one -t:one", q.toString());
 
-    q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -239,12 +238,12 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     String[] fields = {"f1", "f2", "f3"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST,
         BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
-    Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("+f1:one -f2:two f3:three", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -255,12 +254,12 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     String[] queries = {"one", "two"};
     String[] fields = {"b", "t"};
     BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
-    Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
     assertEquals("+b:one -t:two", q.toString());
 
     try {
       BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
-      q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+      q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
       fail();
     } catch(IllegalArgumentException e) {
       // expected exception, array length differs
@@ -269,7 +268,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
 
   public void testAnalyzerReturningNull() throws ParseException {
     String[] fields = new String[] { "f1", "f2", "f3" };
-    MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull());
+    MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull());
     Query q = parser.parse("bla AND blo");
     assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
     // the following queries are not affected as their terms are not analyzed anyway:
@@ -282,7 +281,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
   }
 
   public void testStopWordSearching() throws Exception {
-    Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
     Directory ramDir = new RAMDirectory();
     IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
@@ -291,7 +290,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
     iw.close();
 
     MultiFieldQueryParser mfqp =
-      new MultiFieldQueryParser(Version.LUCENE_CURRENT, new String[] {"body"}, analyzer);
+      new MultiFieldQueryParser(TEST_VERSION_CURRENT, new String[] {"body"}, analyzer);
     mfqp.setDefaultOperator(QueryParser.Operator.AND);
     Query q = mfqp.parse("the footest");
     IndexSearcher is = new IndexSearcher(ramDir, true);
@@ -304,7 +303,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
    * Return empty tokens for field "f1".
    */
   private static class AnalyzerReturningNull extends Analyzer {
-    StandardAnalyzer stdAnalyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    StandardAnalyzer stdAnalyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
 
     public AnalyzerReturningNull() {
     }
@@ -64,7 +64,6 @@ import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LocalizedTestCase;
-import org.apache.lucene.util.Version;
 
 /**
  * Tests QueryParser.
@@ -128,13 +127,13 @@ public class TestQueryParser extends LocalizedTestCase {
     /** Filters LowerCaseTokenizer with StopFilter. */
     @Override
    public final TokenStream tokenStream(String fieldName, Reader reader) {
-      return new QPTestFilter(new LowerCaseTokenizer(Version.LUCENE_CURRENT, reader));
+      return new QPTestFilter(new LowerCaseTokenizer(TEST_VERSION_CURRENT, reader));
     }
   }
 
   public static class QPTestParser extends QueryParser {
     public QPTestParser(String f, Analyzer a) {
-      super(Version.LUCENE_CURRENT, f, a);
+      super(TEST_VERSION_CURRENT, f, a);
     }
 
     @Override
@@ -158,8 +157,8 @@ public class TestQueryParser extends LocalizedTestCase {
 
   public QueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
+      a = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.OR_OPERATOR);
     return qp;
   }
@@ -228,8 +227,8 @@ public class TestQueryParser extends LocalizedTestCase {
   public Query getQueryDOA(String query, Analyzer a)
     throws Exception {
     if (a == null)
-      a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
+      a = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.AND_OPERATOR);
     return qp.parse(query);
   }
@@ -253,8 +252,8 @@ public class TestQueryParser extends LocalizedTestCase {
 
   public void testSimple() throws Exception {
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("türm term term", new WhitespaceAnalyzer(Version.LUCENE_CURRENT), "türm term term");
-    assertQueryEquals("ümlaut", new WhitespaceAnalyzer(Version.LUCENE_CURRENT), "ümlaut");
+    assertQueryEquals("türm term term", new WhitespaceAnalyzer(TEST_VERSION_CURRENT), "türm term term");
+    assertQueryEquals("ümlaut", new WhitespaceAnalyzer(TEST_VERSION_CURRENT), "ümlaut");
 
     assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
     assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");
@@ -301,7 +300,7 @@ public class TestQueryParser extends LocalizedTestCase {
     assertQueryEquals("+title:(dog OR cat) -author:\"bob dole\"", null,
                       "+(title:dog title:cat) -author:\"bob dole\"");
 
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new StandardAnalyzer(TEST_VERSION_CURRENT));
     // make sure OR is the default:
     assertEquals(QueryParser.OR_OPERATOR, qp.getDefaultOperator());
     qp.setDefaultOperator(QueryParser.AND_OPERATOR);
@@ -311,7 +310,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }
 
   public void testPunct() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");
@@ -331,7 +330,7 @@ public class TestQueryParser extends LocalizedTestCase {
     assertQueryEquals("term 1.0 1 2", null, "term");
     assertQueryEquals("term term1 term2", null, "term term term");
 
-    Analyzer a = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+    Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);
     assertQueryEquals("3", a, "3");
     assertQueryEquals("term 1.0 1 2", a, "term 1.0 1 2");
     assertQueryEquals("term term1 term2", a, "term term1 term2");
@@ -456,7 +455,7 @@ public class TestQueryParser extends LocalizedTestCase {
     assertQueryEquals("[ a TO z]", null, "[a TO z]");
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)getQuery("[ a TO z]", null)).getRewriteMethod());
 
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer(Version.LUCENE_CURRENT));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new SimpleAnalyzer(TEST_VERSION_CURRENT));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE,((TermRangeQuery)qp.parse("[ a TO z]")).getRewriteMethod());
 
@@ -473,7 +472,7 @@ public class TestQueryParser extends LocalizedTestCase {
   public void testFarsiRangeCollating() throws Exception {
 
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
                                      IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content","\u0633\u0627\u0628",
@@ -482,7 +481,7 @@ public class TestQueryParser extends LocalizedTestCase {
     iw.close();
     IndexSearcher is = new IndexSearcher(ramDir, true);
 
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "content", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "content", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
 
     // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
     // RuleBasedCollator. However, the Arabic Locale seems to order the Farsi
@@ -580,7 +579,7 @@ public class TestQueryParser extends LocalizedTestCase {
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer(Version.LUCENE_CURRENT));
+    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new SimpleAnalyzer(TEST_VERSION_CURRENT));
 
     // Don't set any date resolution and verify if DateField is used
     assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,
@ -621,7 +620,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testEscaped() throws Exception {
|
public void testEscaped() throws Exception {
|
||||||
Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
|
Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
|
||||||
|
|
||||||
/*assertQueryEquals("\\[brackets", a, "\\[brackets");
|
/*assertQueryEquals("\\[brackets", a, "\\[brackets");
|
||||||
assertQueryEquals("\\[brackets", null, "brackets");
|
assertQueryEquals("\\[brackets", null, "brackets");
|
||||||
|
@ -715,7 +714,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testQueryStringEscaping() throws Exception {
|
public void testQueryStringEscaping() throws Exception {
|
||||||
Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
|
Analyzer a = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
|
||||||
|
|
||||||
assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
|
assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
|
||||||
assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
|
assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");
|
||||||
|
@ -802,8 +801,8 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
throws Exception {
|
throws Exception {
|
||||||
Set<Object> stopWords = new HashSet<Object>(1);
|
Set<Object> stopWords = new HashSet<Object>(1);
|
||||||
stopWords.add("on");
|
stopWords.add("on");
|
||||||
StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, stopWords);
|
StandardAnalyzer oneStopAnalyzer = new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords);
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", oneStopAnalyzer);
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", oneStopAnalyzer);
|
||||||
Query q = qp.parse("on^1.0");
|
Query q = qp.parse("on^1.0");
|
||||||
assertNotNull(q);
|
assertNotNull(q);
|
||||||
q = qp.parse("\"hello\"^2.0");
|
q = qp.parse("\"hello\"^2.0");
|
||||||
|
@ -815,7 +814,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
q = qp.parse("\"on\"^1.0");
|
q = qp.parse("\"on\"^1.0");
|
||||||
assertNotNull(q);
|
assertNotNull(q);
|
||||||
|
|
||||||
QueryParser qp2 = new QueryParser(Version.LUCENE_CURRENT, "field", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
|
QueryParser qp2 = new QueryParser(TEST_VERSION_CURRENT, "field", new StandardAnalyzer(TEST_VERSION_CURRENT));
|
||||||
q = qp2.parse("the^3");
|
q = qp2.parse("the^3");
|
||||||
// "the" is a stop word so the result is an empty query:
|
// "the" is a stop word so the result is an empty query:
|
||||||
assertNotNull(q);
|
assertNotNull(q);
|
||||||
|
@ -844,7 +843,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
|
|
||||||
public void testCustomQueryParserWildcard() {
|
public void testCustomQueryParserWildcard() {
|
||||||
try {
|
try {
|
||||||
new QPTestParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).parse("a?t");
|
new QPTestParser("contents", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("a?t");
|
||||||
fail("Wildcard queries should not be allowed");
|
fail("Wildcard queries should not be allowed");
|
||||||
} catch (ParseException expected) {
|
} catch (ParseException expected) {
|
||||||
// expected exception
|
// expected exception
|
||||||
|
@ -853,7 +852,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
|
|
||||||
public void testCustomQueryParserFuzzy() throws Exception {
|
public void testCustomQueryParserFuzzy() throws Exception {
|
||||||
try {
|
try {
|
||||||
new QPTestParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).parse("xunit~");
|
new QPTestParser("contents", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("xunit~");
|
||||||
fail("Fuzzy queries should not be allowed");
|
fail("Fuzzy queries should not be allowed");
|
||||||
} catch (ParseException expected) {
|
} catch (ParseException expected) {
|
||||||
// expected exception
|
// expected exception
|
||||||
|
@ -863,7 +862,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
public void testBooleanQuery() throws Exception {
|
public void testBooleanQuery() throws Exception {
|
||||||
BooleanQuery.setMaxClauseCount(2);
|
BooleanQuery.setMaxClauseCount(2);
|
||||||
try {
|
try {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
|
||||||
qp.parse("one two three");
|
qp.parse("one two three");
|
||||||
fail("ParseException expected due to too many boolean clauses");
|
fail("ParseException expected due to too many boolean clauses");
|
||||||
} catch (ParseException expected) {
|
} catch (ParseException expected) {
|
||||||
|
@ -875,7 +874,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
* This test differs from TestPrecedenceQueryParser
|
* This test differs from TestPrecedenceQueryParser
|
||||||
*/
|
*/
|
||||||
public void testPrecedence() throws Exception {
|
public void testPrecedence() throws Exception {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
|
||||||
Query query1 = qp.parse("A AND B OR C AND D");
|
Query query1 = qp.parse("A AND B OR C AND D");
|
||||||
Query query2 = qp.parse("+A +B +C +D");
|
Query query2 = qp.parse("+A +B +C +D");
|
||||||
assertEquals(query1, query2);
|
assertEquals(query1, query2);
|
||||||
|
@ -883,7 +882,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
|
|
||||||
public void testLocalDateFormat() throws IOException, ParseException {
|
public void testLocalDateFormat() throws IOException, ParseException {
|
||||||
RAMDirectory ramDir = new RAMDirectory();
|
RAMDirectory ramDir = new RAMDirectory();
|
||||||
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
|
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||||
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
|
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
|
||||||
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
|
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
|
||||||
iw.close();
|
iw.close();
|
||||||
|
@ -899,7 +898,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
|
|
||||||
public void testStarParsing() throws Exception {
|
public void testStarParsing() throws Exception {
|
||||||
final int[] type = new int[1];
|
final int[] type = new int[1];
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)) {
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)) {
|
||||||
@Override
|
@Override
|
||||||
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
|
protected Query getWildcardQuery(String field, String termStr) throws ParseException {
|
||||||
// override error checking of superclass
|
// override error checking of superclass
|
||||||
|
@ -958,7 +957,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testStopwords() throws Exception {
|
public void testStopwords() throws Exception {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a", new StopAnalyzer(Version.LUCENE_CURRENT, StopFilter.makeStopSet(Version.LUCENE_CURRENT, "the", "foo")));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter.makeStopSet(TEST_VERSION_CURRENT, "the", "foo")));
|
||||||
Query result = qp.parse("a:the OR a:foo");
|
Query result = qp.parse("a:the OR a:foo");
|
||||||
assertNotNull("result is null and it shouldn't be", result);
|
assertNotNull("result is null and it shouldn't be", result);
|
||||||
assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
|
assertTrue("result is not a BooleanQuery", result instanceof BooleanQuery);
|
||||||
|
@ -974,7 +973,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testPositionIncrement() throws Exception {
|
public void testPositionIncrement() throws Exception {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "a", new StopAnalyzer(Version.LUCENE_CURRENT, StopFilter.makeStopSet(Version.LUCENE_CURRENT, "the", "in", "are", "this")));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new StopAnalyzer(TEST_VERSION_CURRENT, StopFilter.makeStopSet(TEST_VERSION_CURRENT, "the", "in", "are", "this")));
|
||||||
qp.setEnablePositionIncrements(true);
|
qp.setEnablePositionIncrements(true);
|
||||||
String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
|
String qtxt = "\"the words in poisitions pos02578 are stopped in this phrasequery\"";
|
||||||
// 0 2 5 7 8
|
// 0 2 5 7 8
|
||||||
|
@ -991,7 +990,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMatchAllDocs() throws Exception {
|
public void testMatchAllDocs() throws Exception {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
|
||||||
assertEquals(new MatchAllDocsQuery(), qp.parse("*:*"));
|
assertEquals(new MatchAllDocsQuery(), qp.parse("*:*"));
|
||||||
assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)"));
|
assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)"));
|
||||||
BooleanQuery bq = (BooleanQuery)qp.parse("+*:* -*:*");
|
BooleanQuery bq = (BooleanQuery)qp.parse("+*:* -*:*");
|
||||||
|
@ -1000,7 +999,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void assertHits(int expected, String query, IndexSearcher is) throws ParseException, IOException {
|
private void assertHits(int expected, String query, IndexSearcher is) throws ParseException, IOException {
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "date", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "date", new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
|
||||||
qp.setLocale(Locale.ENGLISH);
|
qp.setLocale(Locale.ENGLISH);
|
||||||
Query q = qp.parse(query);
|
Query q = qp.parse(query);
|
||||||
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
|
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
|
||||||
|
@ -1028,7 +1027,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
// "match"
|
// "match"
|
||||||
public void testPositionIncrements() throws Exception {
|
public void testPositionIncrements() throws Exception {
|
||||||
Directory dir = new MockRAMDirectory();
|
Directory dir = new MockRAMDirectory();
|
||||||
Analyzer a = new StandardAnalyzer(Version.LUCENE_CURRENT);
|
Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);
|
||||||
IndexWriter w = new IndexWriter(dir, a, IndexWriter.MaxFieldLength.UNLIMITED);
|
IndexWriter w = new IndexWriter(dir, a, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||||
Document doc = new Document();
|
Document doc = new Document();
|
||||||
doc.add(new Field("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED));
|
doc.add(new Field("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED));
|
||||||
|
@ -1036,7 +1035,7 @@ public class TestQueryParser extends LocalizedTestCase {
|
||||||
IndexReader r = w.getReader();
|
IndexReader r = w.getReader();
|
||||||
w.close();
|
w.close();
|
||||||
IndexSearcher s = new IndexSearcher(r);
|
IndexSearcher s = new IndexSearcher(r);
|
||||||
QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "f", a);
|
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "f", a);
|
||||||
Query q = qp.parse("\"wizard of ozzy\"");
|
Query q = qp.parse("\"wizard of ozzy\"");
|
||||||
assertEquals(1, s.search(q, 1).totalHits);
|
assertEquals(1, s.search(q, 1).totalHits);
|
||||||
r.close();
|
r.close();
|
||||||
|
|
|
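
Every hunk above makes the same substitution: the deprecated, release-sliding
Version.LUCENE_CURRENT gives way to one suite-wide constant. As a rough sketch
of how such a constant can be exposed by a shared test base class (the class
name, body, and concrete Version value below are illustrative assumptions, not
code from this commit):

    package org.apache.lucene.util;

    // Illustrative sketch only: a test base class exposing a single global
    // version constant. The concrete Version value here is an assumption.
    public abstract class ExampleTestBase {
      /** One release version shared by every core test. */
      public static final Version TEST_VERSION_CURRENT = Version.LUCENE_30;
    }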

@@ -20,8 +20,6 @@ package org.apache.lucene.search;
 import java.util.Random;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -98,7 +96,7 @@ public class BaseTestRangeFilter extends LuceneTestCase {
 try {

 /* build an index */
-IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
+IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(TEST_VERSION_CURRENT), T,
 IndexWriter.MaxFieldLength.LIMITED);

 for (int d = minId; d <= maxId; d++) {

@@ -15,7 +15,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
+import static org.apache.lucene.util.LuceneTestCaseJ4.TEST_VERSION_CURRENT;

 /**
 * Copyright 2005 Apache Software Foundation
@@ -200,7 +200,7 @@ public class QueryUtils {
 private static RAMDirectory makeEmptyIndex(final int numDeletedDocs)
 throws IOException {
 RAMDirectory d = new RAMDirectory();
-IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
 MaxFieldLength.LIMITED);
 for (int i = 0; i < numDeletedDocs; i++) {
 w.addDocument(new Document());
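
QueryUtils is a plain helper class rather than a test-case subclass, so it
cannot inherit the constant; the first hunk above pulls it in with a static
import instead. A minimal sketch of the same pattern in a hypothetical helper
(the helper class itself is invented for illustration; the imports and
constructor calls match those used in the hunks):

    import static org.apache.lucene.util.LuceneTestCaseJ4.TEST_VERSION_CURRENT;

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.queryParser.QueryParser;

    // Hypothetical helper reusing the shared test version via static import.
    public final class ExampleQueryHelper {
      private ExampleQueryHelper() {}

      public static QueryParser newParser(String field) {
        return new QueryParser(TEST_VERSION_CURRENT, field,
            new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
      }
    }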

@@ -32,7 +32,6 @@ import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /** Test BooleanQuery2 against BooleanQuery by overriding the standard query parser.
 * This also tests the scoring order of BooleanQuery.
@@ -51,7+50,7 @@ public class TestBoolean2 extends LuceneTestCase {
 public void setUp() throws Exception {
 super.setUp();
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < docFields.length; i++) {
 Document doc = new Document();
 doc.add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
@@ -68,14 +67,14 @@ public class TestBoolean2 extends LuceneTestCase {
 int docCount = 0;
 do {
 final Directory copy = new RAMDirectory(dir2);
-IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 w.addIndexesNoOptimize(new Directory[] {copy});
 docCount = w.maxDoc();
 w.close();
 mulFactor *= 2;
 } while(docCount < 3000);

-IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
 for(int i=0;i<NUM_EXTRA_DOCS/2;i++) {
@@ -107,7 +106,7 @@ public class TestBoolean2 extends LuceneTestCase {
 };

 public Query makeQuery(String queryText) throws ParseException {
-Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer(Version.LUCENE_CURRENT))).parse(queryText);
+Query q = (new QueryParser(TEST_VERSION_CURRENT, field, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))).parse(queryText);
 return q;
 }

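
The remaining files repeat the same mechanical rewrite, so a single
representative before/after pair may help when skimming the rest. This sketch
assumes a Directory named dir, as in the tests above; both constructor calls
are taken verbatim from the surrounding hunks:

    // Before: analysis behavior pinned to whatever "current" release happens
    // to be when the deprecated constant is resolved.
    IndexWriter before = new IndexWriter(dir,
        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);

    // After: analysis behavior pinned to the single suite-wide test constant.
    IndexWriter after = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);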

@@ -20,7 +20,6 @@ package org.apache.lucene.search;

 import junit.framework.TestCase;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -60,7 +59,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {

 index = new RAMDirectory();
 IndexWriter writer = new IndexWriter(index,
-new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
 true, IndexWriter.MaxFieldLength.LIMITED);

 for (int i = 0; i < data.length; i++) {

@@ -135,7 +135,7 @@ public class TestBooleanOr extends LuceneTestCase {
 RAMDirectory rd = new RAMDirectory();

 //
-IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 //
 Document d = new Document();

@@ -18,8 +18,6 @@ package org.apache.lucene.search;
 */

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import junit.framework.Test;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
@@ -81,7 +79,7 @@ public class TestBooleanPrefixQuery extends LuceneTestCase {
 Query rw2 = null;
 IndexReader reader = null;
 try {
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < categories.length; i++) {
 Document doc = new Document();
 doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));

@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.index.Term;

 public class TestBooleanQuery extends LuceneTestCase {
@@ -61,7 +60,7 @@ public class TestBooleanQuery extends LuceneTestCase {
 // LUCENE-1630
 public void testNullOrSubScorer() throws Throwable {
 Directory dir = new MockRAMDirectory();
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
 w.addDocument(doc);

@@ -28,7 +28,6 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 public class TestBooleanScorer extends LuceneTestCase
 {
@@ -45,7 +44,7 @@ public class TestBooleanScorer extends LuceneTestCase
 String[] values = new String[] { "1", "2", "3", "4" };

 try {
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < values.length; i++) {
 Document doc = new Document();
 doc.add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED));

@@ -32,7 +32,7 @@ import org.apache.lucene.util.OpenBitSetDISI;
 public class TestCachingWrapperFilter extends LuceneTestCase {
 public void testCachingWorks() throws Exception {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();

 IndexReader reader = IndexReader.open(dir, true);
@@ -71,7 +71,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {

 public void testIsCacheAble() throws Exception {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();

 IndexReader reader = IndexReader.open(dir, true);

@@ -70,7 +70,7 @@ implements Serializable {
 private Directory getIndex()
 throws IOException {
 RAMDirectory indexStore = new RAMDirectory ();
-IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 RandomGen random = new RandomGen(newRandom());
 for (int i=0; i<INDEX_SIZE; ++i) { // don't decrease; if to low the problem doesn't show up
 Document doc = new Document();

@@ -18,7 +18,6 @@ package org.apache.lucene.search;
 */

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.DateTools;
 import org.apache.lucene.document.Document;
@@ -51,7 +50,7 @@ public class TestDateFilter
 {
 // create an index
 RAMDirectory indexStore = new RAMDirectory();
-IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 long now = System.currentTimeMillis();

@@ -112,7 +111,7 @@ public class TestDateFilter
 {
 // create an index
 RAMDirectory indexStore = new RAMDirectory();
-IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 long now = System.currentTimeMillis();


@@ -33,7 +33,6 @@ import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;

 /**
 * Test date sorting, i.e. auto-sorting of fields with type "long".
@@ -51,7 +50,7 @@ public class TestDateSort extends LuceneTestCase {
 super.setUp();
 // Create an index writer.
 directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
 IndexWriter.MaxFieldLength.LIMITED);

 // oldest doc:
@@ -76,7 +75,7 @@ public class TestDateSort extends LuceneTestCase {

 Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.STRING, true));

-QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
 Query query = queryParser.parse("Document");

 // Execute the search and process the search results.

@@ -19,7 +19,6 @@ package org.apache.lucene.search;
 */

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -80,7 +79,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase{

 index = new RAMDirectory();
 IndexWriter writer = new IndexWriter(index,
-new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
 true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setSimilarity(sim);


@@ -20,7 +20,6 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexReader;
@@ -40,7 +39,7 @@ public class TestDocBoost extends LuceneTestCase {

 public void testDocBoost() throws Exception {
 RAMDirectory store = new RAMDirectory();
-IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
 Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);

@@ -35,7 +35,6 @@ import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 public class TestDocIdSet extends LuceneTestCase {
@@ -106,7 +105,7 @@ public class TestDocIdSet extends LuceneTestCase {
 // Tests that if a Filter produces a null DocIdSet, which is given to
 // IndexSearcher, everything works fine. This came up in LUCENE-1754.
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("c", "val", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
 writer.addDocument(doc);

@@ -23,8 +23,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.*;
 import org.apache.lucene.store.*;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
@@ -36,7 +34,7 @@ public class TestElevationComparator extends LuceneTestCase {
 //@Test
 public void testSorting() throws Throwable {
 Directory directory = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 writer.setMergeFactor(1000);
 writer.addDocument(adoc(new String[] {"id", "a", "title", "ipod", "str_s", "a"}));

@@ -32,7 +32,6 @@ import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /**
 * Tests primitive queries (ie: that rewrite to themselves) to
@@ -52,7 +51,7 @@ public class TestExplanations extends LuceneTestCase {
 public static final String KEY = "KEY";
 public static final String FIELD = "field";
 public static final QueryParser qp =
-new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+new QueryParser(TEST_VERSION_CURRENT, FIELD, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));

 @Override
 public void tearDown() throws Exception {
@@ -64,7 +63,7 @@ public class TestExplanations extends LuceneTestCase {
 public void setUp() throws Exception {
 super.setUp();
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
 IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < docFields.length; i++) {
 Document doc = new Document();

@@ -23,8 +23,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
@@ -41,7 +39,7 @@ public class TestFieldCache extends LuceneTestCase {
 protected void setUp() throws Exception {
 super.setUp();
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 long theLong = Long.MAX_VALUE;
 double theDouble = Double.MAX_VALUE;
 byte theByte = Byte.MAX_VALUE;

@@ -27,7 +27,6 @@ import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;

 /**
 * A basic 'positive' Unit test class for the FieldCacheRangeFilter class.
@@ -532,7 +531,7 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
 // test using a sparse index (with deleted docs). The DocIdSet should be not cacheable, as it uses TermDocs if the range contains 0
 public void testSparseIndex() throws IOException {
 RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), T, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), T, IndexWriter.MaxFieldLength.LIMITED);

 for (int d = -20; d <= 20; d++) {
 Document doc = new Document();

@@ -27,8 +27,6 @@ import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.DocIdBitSet;
-import org.apache.lucene.util.Version;
-
 import java.util.BitSet;

 /**
@@ -50,7 +48,7 @@ public class TestFilteredQuery extends LuceneTestCase {
 public void setUp() throws Exception {
 super.setUp();
 directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 Document doc = new Document();
 doc.add (new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));

@@ -20,8 +20,6 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -51,13 +49,13 @@ public class TestFilteredSearch extends LuceneTestCase {
 RAMDirectory directory = new RAMDirectory();
 int[] filterBits = {1, 36};
 SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 searchFiltered(writer, directory, filter, enforceSingleSegment);
 // run the test on more than one segment
 enforceSingleSegment = false;
 // reset - it is stateful
 filter.reset();
-writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 // we index 60 docs - this will create 6 segments
 writer.setMaxBufferedDocs(10);
 searchFiltered(writer, directory, filter, enforceSingleSegment);

@@ -33,7 +33,6 @@ import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.queryParser.QueryParser;
-import org.apache.lucene.util.Version;

 /**
 * Tests {@link FuzzyQuery}.
@@ -43,7 +42,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

 public void testFuzziness() throws Exception {
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc("aaaaa", writer);
 addDoc("aaaab", writer);
 addDoc("aaabb", writer);
@@ -200,7 +199,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

 public void testFuzzinessLong() throws Exception {
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc("aaaaaaa", writer);
 addDoc("segment", writer);
 writer.optimize();
@@ -288,7 +287,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

 public void testTokenLengthOpt() throws IOException {
 RAMDirectory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
 true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc("12345678911", writer);
 addDoc("segment", writer);
@@ -320,7 +319,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

 public void testGiga() throws Exception {

-StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);

 Directory index = new MockRAMDirectory();
 IndexWriter w = new IndexWriter(index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
@@ -345,7 +344,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
 IndexReader r = w.getReader();
 w.close();

-Query q = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer).parse( "giga~0.9" );
+Query q = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer).parse( "giga~0.9" );

 // 3. search
 IndexSearcher searcher = new IndexSearcher(r);

@@ -29,14 +29,13 @@ import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.store.RAMDirectory;

 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;

 /**
 * Tests MatchAllDocsQuery.
 *
 */
 public class TestMatchAllDocsQuery extends LuceneTestCase {
-private Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
+private Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);

 public void testQuery() throws Exception {

@@ -100,7 +99,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
 assertEquals(2, hits.length);

 // test parsable toString()
-QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "key", analyzer);
+QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "key", analyzer);
 hits = is.search(qp.parse(new MatchAllDocsQuery().toString()), null, 1000).scoreDocs;
 assertEquals(2, hits.length);

@@ -28,8 +28,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.util.LinkedList;
 import java.util.Collections;

@@ -47,7 +45,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
 
   public void testPhrasePrefix() throws IOException {
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     add("blueberry pie", writer);
     add("blueberry strudel", writer);
     add("blueberry pizza", writer);

@@ -141,7 +139,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
     // The contained PhraseMultiQuery must contain exactly one term array.
 
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     add("blueberry pie", writer);
     add("blueberry chewing gum", writer);
     add("blue raspberry pie", writer);

@@ -169,7 +167,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
 
   public void testPhrasePrefixWithBooleanQuery() throws IOException {
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.LIMITED);
     add("This is a test", "object", writer);
     add("a note", "note", writer);
     writer.close();
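The last hunk above also exercises StandardAnalyzer's two-argument constructor, which takes the match version plus a stopword set; passing Collections.emptySet() keeps StandardAnalyzer's tokenization but disables stopword filtering. A small sketch of the same call outside a test (the wrapper class is hypothetical, the constructor is the one shown in the hunk):

import java.util.Collections;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.util.Version;

public class EmptyStopSetExample {
  // Match version first, then the stopword set; an empty set means
  // no tokens are removed as stopwords.
  public static StandardAnalyzer newAnalyzer(Version matchVersion) {
    return new StandardAnalyzer(matchVersion, Collections.emptySet());
  }
}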
@@ -30,8 +30,6 @@ import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashSet;

@@ -84,9 +82,9 @@ public class TestMultiSearcher extends LuceneTestCase
     lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
 
     // creating an index writer for the first index
-    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     // creating an index writer for the second index, but writing nothing
-    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
     //--------------------------------------------------------------------
     // scenario 1

@@ -103,7 +101,7 @@ public class TestMultiSearcher extends LuceneTestCase
     writerB.close();
 
     // creating the query
-    QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fulltext", new StandardAnalyzer(TEST_VERSION_CURRENT));
     Query query = parser.parse("handle:1");
 
     // building the searchables

@@ -130,7 +128,7 @@ public class TestMultiSearcher extends LuceneTestCase
     //--------------------------------------------------------------------
 
     // adding one document to the empty index
-    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     writerB.addDocument(lDoc);
     writerB.optimize();
     writerB.close();

@@ -176,7 +174,7 @@ public class TestMultiSearcher extends LuceneTestCase
     readerB.close();
 
     // optimizing the index with the writer
-    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+    writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     writerB.optimize();
     writerB.close();
 
@@ -26,8 +26,6 @@ import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 
 /**

@@ -88,7 +86,7 @@ public class TestMultiSearcherRanking extends LuceneTestCase {
   private void checkQuery(String queryStr) throws IOException, ParseException {
     // check result hit ranking
     if(verbose) System.out.println("Query: " + queryStr);
-    QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, FIELD_NAME, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+    QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
     Query query = queryParser.parse(queryStr);
     ScoreDoc[] multiSearcherHits = multiSearcher.search(query, null, 1000).scoreDocs;
     ScoreDoc[] singleSearcherHits = singleSearcher.search(query, null, 1000).scoreDocs;

@@ -115,12 +113,12 @@ public class TestMultiSearcherRanking extends LuceneTestCase {
     super.setUp();
     // create MultiSearcher from two seperate searchers
     Directory d1 = new RAMDirectory();
-    IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+    IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
     addCollection1(iw1);
     iw1.close();
     Directory d2 = new RAMDirectory();
-    IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+    IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
     addCollection2(iw2);
     iw2.close();

@@ -132,7 +130,7 @@ public class TestMultiSearcherRanking extends LuceneTestCase {
 
     // create IndexSearcher which contains all documents
     Directory d = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+    IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
     addCollection1(iw);
     addCollection2(iw);
@@ -26,8 +26,6 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
-
 import java.io.IOException;
 import java.text.Collator;
 import java.util.Locale;

@@ -66,7 +64,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
         "X 4 5 6" };
 
     small = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
+    IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
 
     for (int i = 0; i < data.length; i++) {

@@ -617,7 +615,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
 
     /* build an index */
     RAMDirectory farsiIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
+    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(TEST_VERSION_CURRENT), T,
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,

@@ -657,7 +655,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
 
     /* build an index */
     RAMDirectory danishIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(danishIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
+    IndexWriter writer = new IndexWriter(danishIndex, new SimpleAnalyzer(TEST_VERSION_CURRENT), T,
         IndexWriter.MaxFieldLength.LIMITED);
 
     // Danish collation orders the words below in the given order
@@ -18,7 +18,6 @@ package org.apache.lucene.search;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexReader;

@@ -42,7 +41,7 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     IndexWriter writer
-        = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+        = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     //writer.setUseCompoundFile(false);
     //writer.infoStream = System.out;
     for (int i = 0; i < numDocs; i++) {
@@ -30,7 +30,6 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
 
 public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
 

@@ -44,7 +43,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
     final Random rnd = newRandom();
 
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
 
     DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.US));
 
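Taken together, the pattern after this refactor is uniform: any Analyzer or QueryParser a core test constructs receives the inherited constant instead of naming Version directly. A hypothetical test in this style (the class, method, and field names below are invented for illustration; the constructors are the ones shown in the hunks above):

package org.apache.lucene.search;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.util.LuceneTestCase;

public class TestVersionConstantStyle extends LuceneTestCase {

  public void testParserAndAnalyzerShareVersion() throws Exception {
    // Both constructors take the match version; neither line needs
    // an import of org.apache.lucene.util.Version.
    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "content", analyzer);
    assertNotNull(parser.parse("text"));
  }
}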
Some files were not shown because too many files have changed in this diff.