mirror of https://github.com/apache/lucene.git
LUCENE-2240, LUCENE-2241: SimpleAnalyzer and WhitespaceAnalyzer now have Version ctors. This commit also fixes all core tests to no longer use the deprecated ctors.
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@905044 13f79535-47bb-0310-9956-ffa450edef68
parent fdf4ea2448
commit 869c789563
CHANGES.txt | 12 ++++++++++---
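In practical terms, callers of the old no-arg constructors now pass a Version. A minimal before/after sketch (the IndexWriter setup mirrors the test changes below; Version.LUCENE_31 is used here only as an example value, and the class name is illustrative):

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class VersionCtorMigration {
  public static void main(String[] args) throws Exception {
    // Deprecated: the no-arg ctor now delegates to Version.LUCENE_30.
    Analyzer old = new SimpleAnalyzer();

    // Preferred: pass the Version you index against. With LUCENE_31 or
    // later the tokenizers use the int (codepoint) based CharTokenizer API.
    Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_31);
    IndexWriter writer = new IndexWriter(new RAMDirectory(), analyzer,
        true, IndexWriter.MaxFieldLength.LIMITED);
    writer.close();
  }
}
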
CHANGES.txt
@@ -61,6 +61,9 @@ API Changes
   members. These were converted to private and unused protected
   constructors removed. (Steven Rowe via Robert Muir)
 
+* LUCENE-2240: SimpleAnalyzer and WhitespaceAnalyzer now have
+  Version ctors. (Simon Willnauer via Uwe Schindler)
+
 Bug fixes
 
 * LUCENE-2092: BooleanQuery was ignoring disableCoord in its hashCode
@@ -128,10 +131,11 @@ New features
 * LUCENE-2198: Support protected words in stemming TokenFilters using a
   new KeywordAttribute. (Simon Willnauer via Uwe Schindler)
 
-* LUCENE-2183: Added Unicode 4 support to CharTokenizer and its subclasses.
-  CharTokenizer now has new int-API which is conditionally preferred to
-  the old char-API depending on the provided Version. Version < 3.1 will
-  use the char-API. (Simon Willnauer via Uwe Schindler)
+* LUCENE-2183, LUCENE-2240, LUCENE-2241: Added Unicode 4 support
+  to CharTokenizer and its subclasses. CharTokenizer now has new
+  int-API which is conditionally preferred to the old char-API depending
+  on the provided Version. Version < 3.1 will use the char-API.
+  (Simon Willnauer via Uwe Schindler)
 
 Optimizations
 
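The LUCENE-2183 entry is the reason these analyzers need a Version: CharTokenizer gained a codepoint-based API. A sketch of a subclass written against it (DigitTokenizer is a hypothetical example, not part of this commit; isTokenChar(int) and the Version ctor are the API the entry names):

import java.io.Reader;
import org.apache.lucene.analysis.CharTokenizer;
import org.apache.lucene.util.Version;

// Hypothetical subclass: with Version.LUCENE_31 or later, CharTokenizer
// calls isTokenChar(int) with full Unicode codepoints, so supplementary
// characters (outside the BMP) are classified correctly.
public final class DigitTokenizer extends CharTokenizer {
  public DigitTokenizer(Version matchVersion, Reader in) {
    super(matchVersion, in);
  }

  @Override
  protected boolean isTokenChar(int c) {
    return Character.isDigit(c); // the int overload handles codepoints > 0xFFFF
  }
}
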
SimpleAnalyzer.java
@@ -19,14 +19,42 @@ package org.apache.lucene.analysis;
 
 import java.io.Reader;
 
-/** An {@link Analyzer} that filters {@link LetterTokenizer}
- *  with {@link LowerCaseFilter} */
+import org.apache.lucene.util.Version;
+
+/** An {@link Analyzer} that filters {@link LetterTokenizer}
+ *  with {@link LowerCaseFilter}
+ * <p>
+ * <a name="version">You must specify the required {@link Version} compatibility
+ * when creating {@link CharTokenizer}:
+ * <ul>
+ * <li>As of 3.1, {@link LowerCaseTokenizer} uses an int based API to normalize and
+ * detect token codepoints. See {@link CharTokenizer#isTokenChar(int)} and
+ * {@link CharTokenizer#normalize(int)} for details.</li>
+ * </ul>
+ * <p>
+ **/
 public final class SimpleAnalyzer extends ReusableAnalyzerBase {
+
+  private final Version matchVersion;
+
+  /**
+   * Creates a new {@link SimpleAnalyzer}
+   * @param matchVersion Lucene version to match See {@link <a href="#version">above</a>}
+   */
+  public SimpleAnalyzer(Version matchVersion) {
+    this.matchVersion = matchVersion;
+  }
+
+  /**
+   * Creates a new {@link SimpleAnalyzer}
+   * @deprecated use {@link #SimpleAnalyzer(Version)} instead
+   */
+  @Deprecated public SimpleAnalyzer() {
+    this(Version.LUCENE_30);
+  }
 
   @Override
   protected TokenStreamComponents createComponents(final String fieldName,
       final Reader reader) {
-    return new TokenStreamComponents(new LowerCaseTokenizer(reader));
+    return new TokenStreamComponents(new LowerCaseTokenizer(matchVersion, reader));
   }
 }
 
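SimpleAnalyzer above shows the pattern this commit applies throughout: store matchVersion and forward it to the tokenizer inside createComponents. A hypothetical analyzer following the same shape (MyLowerCaseAnalyzer is not part of this commit):

import java.io.Reader;
import org.apache.lucene.analysis.LowerCaseTokenizer;
import org.apache.lucene.analysis.ReusableAnalyzerBase;
import org.apache.lucene.util.Version;

// Same shape as SimpleAnalyzer: the Version captured at construction time
// decides whether the tokenizer uses the char- or codepoint-based API.
public final class MyLowerCaseAnalyzer extends ReusableAnalyzerBase {
  private final Version matchVersion;

  public MyLowerCaseAnalyzer(Version matchVersion) {
    this.matchVersion = matchVersion;
  }

  @Override
  protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
    return new TokenStreamComponents(new LowerCaseTokenizer(matchVersion, reader));
  }
}
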
StopAnalyzer.java
@@ -99,7 +99,7 @@ public final class StopAnalyzer extends StopwordAnalyzerBase {
   @Override
   protected TokenStreamComponents createComponents(String fieldName,
       Reader reader) {
-    final Tokenizer source = new LowerCaseTokenizer(reader);
+    final Tokenizer source = new LowerCaseTokenizer(matchVersion, reader);
     return new TokenStreamComponents(source, new StopFilter(matchVersion,
         source, stopwords));
   }
 
WhitespaceAnalyzer.java
@@ -19,13 +19,44 @@ package org.apache.lucene.analysis;
 
 import java.io.Reader;
 
-/** An Analyzer that uses {@link WhitespaceTokenizer}. */
+import org.apache.lucene.util.Version;
+
+/**
+ * An Analyzer that uses {@link WhitespaceTokenizer}.
+ * <p>
+ * <a name="version">You must specify the required {@link Version} compatibility
+ * when creating {@link CharTokenizer}:
+ * <ul>
+ * <li>As of 3.1, {@link WhitespaceTokenizer} uses an int based API to normalize and
+ * detect token codepoints. See {@link CharTokenizer#isTokenChar(int)} and
+ * {@link CharTokenizer#normalize(int)} for details.</li>
+ * </ul>
+ * <p>
+ **/
 public final class WhitespaceAnalyzer extends ReusableAnalyzerBase {
+
+  private final Version matchVersion;
+
+  /**
+   * Creates a new {@link WhitespaceAnalyzer}
+   * @param matchVersion Lucene version to match See {@link <a href="#version">above</a>}
+   */
+  public WhitespaceAnalyzer(Version matchVersion) {
+    this.matchVersion = matchVersion;
+  }
+
+  /**
+   * Creates a new {@link WhitespaceAnalyzer}
+   * @deprecated use {@link #WhitespaceAnalyzer(Version)} instead
+   */
+  @Deprecated
+  public WhitespaceAnalyzer() {
+    this(Version.LUCENE_30);
+  }
 
   @Override
   protected TokenStreamComponents createComponents(final String fieldName,
       final Reader reader) {
-    return new TokenStreamComponents(new WhitespaceTokenizer(reader));
+    return new TokenStreamComponents(new WhitespaceTokenizer(matchVersion, reader));
   }
 }
 
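The test updates that follow all thread a Version through WhitespaceTokenizer and the analyzers. For orientation, a minimal standalone sketch of the tokenizer as the tests now construct it (the printing loop is illustrative):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.Version;

public class WhitespaceTokenizerDemo {
  public static void main(String[] args) throws Exception {
    TokenStream ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
        new StringReader("foo bar FOO BAR"));
    TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
    while (ts.incrementToken()) {
      System.out.println(termAtt.term()); // foo, bar, FOO, BAR
    }
    ts.close();
  }
}
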
TestMergeSchedulerExternal.java
@@ -18,6 +18,7 @@ package org.apache.lucene;
  */
 import java.io.IOException;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
@@ -95,7 +96,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     MyMergeScheduler ms = new MyMergeScheduler();
     writer.setMergeScheduler(ms);
     writer.setMaxBufferedDocs(2);
 
TestSearch.java
@@ -74,7 +74,7 @@ public class TestSearch extends LuceneTestCase {
       throws Exception
     {
       Directory directory = new RAMDirectory();
-      Analyzer analyzer = new SimpleAnalyzer();
+      Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
       IndexWriter writer = new IndexWriter(directory, analyzer, true,
           IndexWriter.MaxFieldLength.LIMITED);
 
TestSearchForDuplicates.java
@@ -79,7 +79,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
 
   private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
     Directory directory = new RAMDirectory();
-    Analyzer analyzer = new SimpleAnalyzer();
+    Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(directory, analyzer, true,
         IndexWriter.MaxFieldLength.LIMITED);
 
TestASCIIFoldingFilter.java
@@ -18,6 +18,7 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.util.Version;
 
 import java.io.StringReader;
 import java.util.List;
@@ -28,7 +29,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
 
   // testLain1Accents() is a copy of TestLatin1AccentFilter.testU().
   public void testLatin1Accents() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(new StringReader
+    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader
       ("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ"
       +" Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij"
      +" ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
@@ -1889,7 +1890,7 @@ public class TestASCIIFoldingFilter extends BaseTokenStreamTestCase {
       expectedOutputTokens.add(expected.toString());
     }
 
-    TokenStream stream = new WhitespaceTokenizer(new StringReader(inputText.toString()));
+    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(inputText.toString()));
     ASCIIFoldingFilter filter = new ASCIIFoldingFilter(stream);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     Iterator<String> expectedIter = expectedOutputTokens.iterator();
 
TestAnalyzers.java
@@ -35,7 +35,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   }
 
   public void testSimple() throws Exception {
-    Analyzer a = new SimpleAnalyzer();
+    Analyzer a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
     assertAnalyzesTo(a, "foo bar FOO BAR",
         new String[] { "foo", "bar", "foo", "bar" });
     assertAnalyzesTo(a, "foo bar . FOO <> BAR",
@@ -55,7 +55,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   }
 
   public void testNull() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer();
+    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
     assertAnalyzesTo(a, "foo bar FOO BAR",
         new String[] { "foo", "bar", "FOO", "BAR" });
     assertAnalyzesTo(a, "foo bar . FOO <> BAR",
@@ -97,11 +97,11 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   public void testPayloadCopy() throws IOException {
     String s = "how now brown cow";
     TokenStream ts;
-    ts = new WhitespaceTokenizer(new StringReader(s));
+    ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(s));
     ts = new PayloadSetter(ts);
     verifyPayload(ts);
 
-    ts = new WhitespaceTokenizer(new StringReader(s));
+    ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(s));
     ts = new PayloadSetter(ts);
     verifyPayload(ts);
   }
@@ -127,7 +127,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
 
   @Override
   public TokenStream tokenStream(String field, Reader reader) {
-    return new WhitespaceAnalyzer().tokenStream(field, reader);
+    return new WhitespaceAnalyzer(Version.LUCENE_CURRENT).tokenStream(field, reader);
   }
 }
 
@@ -145,7 +145,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   @Override
   public TokenStream tokenStream(String fieldName, Reader reader) {
     return new LowerCaseFilter(Version.LUCENE_CURRENT,
-        new WhitespaceTokenizer(reader));
+        new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader));
   }
 
 }
@@ -192,8 +192,8 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
   public void testLowerCaseFilterLowSurrogateLeftover() throws IOException {
     // test if the limit of the termbuffer is correctly used with supplementary
     // chars
-    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(new StringReader(
-        "BogustermBogusterm\udc16"));
+    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
+        new StringReader("BogustermBogusterm\udc16"));
     LowerCaseFilter filter = new LowerCaseFilter(Version.LUCENE_CURRENT,
         tokenizer);
     assertTokenStreamContents(filter, new String[] {"bogustermbogusterm\udc16"});
 
TestCachingTokenFilter.java
@@ -31,13 +31,14 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermPositions;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
 
 public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
   private String[] tokens = new String[] {"term1", "term2", "term3", "term2"};
 
   public void testCaching() throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     TokenStream stream = new TokenStream() {
       private int index = 0;
 
TestCharTokenizers.java
@@ -100,7 +100,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
 
   public void testLowerCaseTokenizer() throws IOException {
     StringReader reader = new StringReader("Tokenizer \ud801\udc1ctest");
-    LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(Version.LUCENE_31,
+    LowerCaseTokenizer tokenizer = new LowerCaseTokenizer(Version.LUCENE_CURRENT,
         reader);
     assertTokenStreamContents(tokenizer, new String[] { "tokenizer",
         "\ud801\udc44test" });
@@ -115,7 +115,7 @@ public class TestCharTokenizers extends BaseTokenStreamTestCase {
 
   public void testWhitespaceTokenizer() throws IOException {
     StringReader reader = new StringReader("Tokenizer \ud801\udc1ctest");
-    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_31,
+    WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
         reader);
     assertTokenStreamContents(tokenizer, new String[] { "Tokenizer",
         "\ud801\udc1ctest" });
 
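The surrogate-pair strings in TestCharTokenizers show why the int API matters: "\ud801\udc1c" is the single supplementary codepoint U+1041C, which lowercases to U+10444 ("\ud801\udc44") only when treated as one codepoint. A small standalone illustration, plain JDK with no Lucene types:

public class SupplementaryLowerCase {
  public static void main(String[] args) {
    String s = "\ud801\udc1c"; // U+1041C encoded as a surrogate pair
    int cp = s.codePointAt(0);
    int lower = Character.toLowerCase(cp);
    System.out.printf("U+%04X -> U+%04X%n", cp, lower); // U+1041C -> U+10444
    // Char-by-char lowercasing leaves the surrogates unchanged:
    System.out.println(Character.toLowerCase(s.charAt(0)) == s.charAt(0)); // true
  }
}
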
TestISOLatin1AccentFilter.java
@@ -18,12 +18,13 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.util.Version;
 
 import java.io.StringReader;
 
 public class TestISOLatin1AccentFilter extends BaseTokenStreamTestCase {
   public void testU() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(new StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
+    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("Des mot clés À LA CHAÎNE À Á Â Ã Ä Å Æ Ç È É Ê Ë Ì Í Î Ï IJ Ð Ñ Ò Ó Ô Õ Ö Ø Œ Þ Ù Ú Û Ü Ý Ÿ à á â ã ä å æ ç è é ê ë ì í î ï ij ð ñ ò ó ô õ ö ø œ ß þ ù ú û ü ý ÿ fi fl"));
     ISOLatin1AccentFilter filter = new ISOLatin1AccentFilter(stream);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
     assertTermEquals("Des", filter, termAtt);
 
TestKeywordAnalyzer.java
@@ -43,7 +43,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
     super.setUp();
     directory = new RAMDirectory();
     IndexWriter writer = new IndexWriter(directory,
-                                         new SimpleAnalyzer(),
+                                         new SimpleAnalyzer(Version.LUCENE_CURRENT),
                                          true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
@@ -57,7 +57,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
   }
 
   public void testPerFieldAnalyzer() throws Exception {
-    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
+    PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer(Version.LUCENE_CURRENT));
     analyzer.addAnalyzer("partnum", new KeywordAnalyzer());
 
     QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, "description", analyzer);
 
TestKeywordMarkerTokenFilter.java
@@ -39,16 +39,16 @@ public class TestKeywordMarkerTokenFilter extends BaseTokenStreamTestCase {
     String[] output = new String[] { "the", "quick", "brown", "LuceneFox",
         "jumps" };
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
             "The quIck browN LuceneFox Jumps")), set)), output);
     Set<String> jdkSet = new HashSet<String>();
     jdkSet.add("LuceneFox");
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
             "The quIck browN LuceneFox Jumps")), jdkSet)), output);
     Set<?> set2 = set;
     assertTokenStreamContents(new LowerCaseFilterMock(
-        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(new StringReader(
+        new KeywordMarkerTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(
             "The quIck browN LuceneFox Jumps")), set2)), output);
   }
 
TestLengthFilter.java
@@ -18,13 +18,14 @@ package org.apache.lucene.analysis;
  */
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.util.Version;
 
 import java.io.StringReader;
 
 public class TestLengthFilter extends BaseTokenStreamTestCase {
 
   public void testFilter() throws Exception {
-    TokenStream stream = new WhitespaceTokenizer(
+    TokenStream stream = new WhitespaceTokenizer(Version.LUCENE_CURRENT,
         new StringReader("short toolong evenmuchlongertext a ab toolong foo"));
     LengthFilter filter = new LengthFilter(stream, 2, 6);
     TermAttribute termAtt = filter.getAttribute(TermAttribute.class);
 
TestMappingCharFilter.java
@@ -19,6 +19,8 @@ package org.apache.lucene.analysis;
 
 import java.io.StringReader;
 
+import org.apache.lucene.util.Version;
+
 public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 
   NormalizeCharMap normMap;
@@ -58,55 +60,55 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
 
   public void testNothingChange() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "x" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"x"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "h" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"i"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to2() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "j" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"jj"}, new int[]{0}, new int[]{1});
   }
 
   public void test1to3() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "k" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"kkk"}, new int[]{0}, new int[]{1});
   }
 
   public void test2to4() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "ll" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"llll"}, new int[]{0}, new int[]{2});
   }
 
   public void test2to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "aa" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"a"}, new int[]{0}, new int[]{2});
   }
 
   public void test3to1() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "bbb" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"b"}, new int[]{0}, new int[]{3});
   }
 
   public void test4to2() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "cccc" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[]{"cc"}, new int[]{0}, new int[]{4});
   }
 
   public void test5to0() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, new StringReader( "empty" ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts, new String[0]);
   }
 
@@ -130,7 +132,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
   //
   public void testTokenStream() throws Exception {
     CharStream cs = new MappingCharFilter( normMap, CharReader.get( new StringReader( "h i j k ll cccc bbb aa" ) ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts,
       new String[]{"i","i","jj","kkk","llll","cc","b","a"},
       new int[]{0,2,4,6,8,11,16,20},
@@ -151,7 +153,7 @@ public class TestMappingCharFilter extends BaseTokenStreamTestCase {
   public void testChained() throws Exception {
     CharStream cs = new MappingCharFilter( normMap,
         new MappingCharFilter( normMap, CharReader.get( new StringReader( "aaaa ll h" ) ) ) );
-    TokenStream ts = new WhitespaceTokenizer( cs );
+    TokenStream ts = new WhitespaceTokenizer( Version.LUCENE_CURRENT, cs );
     assertTokenStreamContents(ts,
       new String[]{"a","llllllll","i"},
       new int[]{0,5,8},
 
TestPerFieldAnalzyerWrapper.java
@@ -3,6 +3,7 @@ package org.apache.lucene.analysis;
 import java.io.StringReader;
 
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+import org.apache.lucene.util.Version;
 
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -25,8 +26,8 @@ public class TestPerFieldAnalzyerWrapper extends BaseTokenStreamTestCase {
   public void testPerField() throws Exception {
     String text = "Qwerty";
     PerFieldAnalyzerWrapper analyzer =
-              new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer());
-    analyzer.addAnalyzer("special", new SimpleAnalyzer());
+              new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
+    analyzer.addAnalyzer("special", new SimpleAnalyzer(Version.LUCENE_CURRENT));
 
     TokenStream tokenStream = analyzer.tokenStream("field",
         new StringReader(text));
 
TestPorterStemFilter.java
@@ -62,7 +62,7 @@ public class TestPorterStemFilter extends BaseTokenStreamTestCase {
   public void testWithKeywordAttribute() throws IOException {
     CharArraySet set = new CharArraySet(Version.LUCENE_CURRENT, 1, true);
     set.add("yourselves");
-    Tokenizer tokenizer = new WhitespaceTokenizer(new StringReader("yourselves yours"));
+    Tokenizer tokenizer = new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("yourselves yours"));
     TokenStream filter = new PorterStemFilter(new KeywordMarkerTokenFilter(tokenizer, set));
     assertTokenStreamContents(filter, new String[] {"yourselves", "your"});
   }
 
TestStopFilter.java
@@ -38,7 +38,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
   public void testExactCase() throws IOException {
     StringReader reader = new StringReader("Now is The Time");
     Set<String> stopWords = new HashSet<String>(Arrays.asList("is", "the", "Time"));
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(reader), stopWords, false);
+    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopWords, false);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -50,7 +50,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
   public void testIgnoreCase() throws IOException {
     StringReader reader = new StringReader("Now is The Time");
     Set<Object> stopWords = new HashSet<Object>(Arrays.asList( "is", "the", "Time" ));
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(reader), stopWords, true);
+    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopWords, true);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -61,7 +61,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
     StringReader reader = new StringReader("Now is The Time");
     String[] stopWords = new String[] { "is", "the", "Time" };
     Set<Object> stopSet = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords);
-    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(reader), stopSet);
+    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
     final TermAttribute termAtt = stream.getAttribute(TermAttribute.class);
     assertTrue(stream.incrementToken());
     assertEquals("Now", termAtt.term());
@@ -87,11 +87,11 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
     Set<Object> stopSet = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords);
     // with increments
     StringReader reader = new StringReader(sb.toString());
-    StopFilter stpf = new StopFilter(Version.LUCENE_24, new WhitespaceTokenizer(reader), stopSet);
+    StopFilter stpf = new StopFilter(Version.LUCENE_24, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
     doTestStopPositons(stpf,true);
     // without increments
     reader = new StringReader(sb.toString());
-    stpf = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(reader), stopSet);
+    stpf = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet);
     doTestStopPositons(stpf,false);
     // with increments, concatenating two stop filters
     ArrayList<String> a0 = new ArrayList<String>();
@@ -110,7 +110,7 @@ public class TestStopFilter extends BaseTokenStreamTestCase {
     Set<Object> stopSet0 = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords0);
     Set<Object> stopSet1 = StopFilter.makeStopSet(Version.LUCENE_CURRENT, stopWords1);
     reader = new StringReader(sb.toString());
-    StopFilter stpf0 = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(reader), stopSet0); // first part of the set
+    StopFilter stpf0 = new StopFilter(Version.LUCENE_CURRENT, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader), stopSet0); // first part of the set
     stpf0.setEnablePositionIncrements(true);
     StopFilter stpf01 = new StopFilter(Version.LUCENE_CURRENT, stpf0, stopSet1); // two stop filters concatenated!
     doTestStopPositons(stpf01,true);
 
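The StopFilter changes above layer two Version-aware components. A minimal sketch of that combination, with arbitrary stop words (the output comment reflects that matching here is case-sensitive):

import java.io.StringReader;
import java.util.Set;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.Version;

public class StopFilterDemo {
  public static void main(String[] args) throws Exception {
    Set<Object> stopSet = StopFilter.makeStopSet(Version.LUCENE_CURRENT, "is", "the");
    TokenStream stream = new StopFilter(Version.LUCENE_CURRENT,
        new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("Now is The Time")),
        stopSet);
    TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
    while (stream.incrementToken()) {
      System.out.println(termAtt.term()); // Now, The, Time ("is" is dropped)
    }
  }
}
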
TestTeeSinkTokenFilter.java
@@ -76,7 +76,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
 
 
   public void testGeneral() throws IOException {
-    final TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(new StringReader(buffer1.toString())));
+    final TeeSinkTokenFilter source = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer1.toString())));
     final TokenStream sink1 = source.newSinkTokenStream();
     final TokenStream sink2 = source.newSinkTokenStream(theFilter);
 
@@ -90,7 +90,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
   }
 
   public void testMultipleSources() throws Exception {
-    final TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(new StringReader(buffer1.toString())));
+    final TeeSinkTokenFilter tee1 = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer1.toString())));
     final TeeSinkTokenFilter.SinkTokenStream dogDetector = tee1.newSinkTokenStream(dogFilter);
     final TeeSinkTokenFilter.SinkTokenStream theDetector = tee1.newSinkTokenStream(theFilter);
     final TokenStream source1 = new CachingTokenFilter(tee1);
@@ -99,7 +99,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
     dogDetector.addAttribute(CheckClearAttributesAttribute.class);
     theDetector.addAttribute(CheckClearAttributesAttribute.class);
 
-    final TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(new StringReader(buffer2.toString())));
+    final TeeSinkTokenFilter tee2 = new TeeSinkTokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer2.toString())));
     tee2.addSinkTokenStream(dogDetector);
     tee2.addSinkTokenStream(theDetector);
     final TokenStream source2 = tee2;
 
CollationTestBase.java
@@ -179,7 +179,7 @@ public class CollationTestBase extends TestCase {
                                       String usResult) throws Exception {
     RAMDirectory indexStore = new RAMDirectory();
     PerFieldAnalyzerWrapper analyzer
-      = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer());
+      = new PerFieldAnalyzerWrapper(new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     analyzer.addAnalyzer("US", usAnalyzer);
     analyzer.addAnalyzer("France", franceAnalyzer);
     analyzer.addAnalyzer("Sweden", swedenAnalyzer);
 
DocHelper.java
@@ -29,6 +29,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Version;
 
 class DocHelper {
   public static final String FIELD_1_TEXT = "field one text";
@@ -218,7 +219,7 @@ class DocHelper {
    */
   public static SegmentInfo writeDoc(Directory dir, Document doc) throws IOException
   {
-    return writeDoc(dir, new WhitespaceAnalyzer(), Similarity.getDefault(), doc);
+    return writeDoc(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), Similarity.getDefault(), doc);
   }
 
   /**
 
TestAddIndexesNoOptimize.java
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -428,7 +429,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
   private IndexWriter newWriter(Directory dir, boolean create)
       throws IOException {
-    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create, IndexWriter.MaxFieldLength.UNLIMITED);
+    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     return writer;
   }
@@ -502,7 +503,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   public void testHangOnClose() throws IOException {
 
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
     writer.setMaxBufferedDocs(5);
     writer.setUseCompoundFile(false);
@@ -528,7 +529,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     writer.close();
 
     Directory dir2 = new MockRAMDirectory();
-    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
     lmp.setMinMergeMB(0.0001);
     writer.setMergePolicy(lmp);
 
TestAtomicUpdate.java
@@ -26,7 +26,7 @@ import java.io.File;
 import java.io.IOException;
 
 public class TestAtomicUpdate extends LuceneTestCase {
-  private static final Analyzer ANALYZER = new SimpleAnalyzer();
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
   private Random RANDOM;
 
   public class MockIndexWriter extends IndexWriter {
 
TestBackwardsCompatibility.java
@@ -45,6 +45,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.ReaderUtil;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 /*
@@ -217,7 +218,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
       hasTested29++;
     }
 
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     w.optimize();
     w.close();
 
@@ -272,7 +273,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
   }
 
   public void searchIndex(String dirName, String oldName) throws IOException {
-    //QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer());
+    //QueryParser parser = new QueryParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     //Query query = parser.parse("handle:1");
 
     dirName = fullDir(dirName);
@@ -357,7 +358,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     Directory dir = FSDirectory.open(new File(dirName));
 
     // open writer
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
 
     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -401,7 +402,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     searcher.close();
 
     // optimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -451,7 +452,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     searcher.close();
 
     // optimize
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -473,7 +474,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     dirName = fullDir(dirName);
 
     Directory dir = FSDirectory.open(new File(dirName));
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(doCFS);
     writer.setMaxBufferedDocs(10);
 
@@ -484,7 +485,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     writer.close();
 
     // open fresh writer so we get no prx file in the added segment
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(doCFS);
     writer.setMaxBufferedDocs(10);
     addNoProxDoc(writer);
@@ -511,7 +512,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
     try {
       Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
 
-      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setRAMBufferSizeMB(16.0);
       for(int i=0;i<35;i++) {
         addDoc(writer, i);
 
TestCheckIndex.java
@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.ArrayList;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -34,7 +35,7 @@ public class TestCheckIndex extends LuceneTestCase {
 
   public void testDeletedDocs() throws IOException {
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     Document doc = new Document();
 
TestConcurrentMergeScheduler.java
@@ -25,12 +25,13 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import java.io.IOException;
 
 public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
-  private static final Analyzer ANALYZER = new SimpleAnalyzer();
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
 
   private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
     boolean doFail;
 
TestCrash.java
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.NoLockFactory;
@@ -35,7 +36,7 @@ public class TestCrash extends LuceneTestCase {
   private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
     //writer.setMaxBufferedDocs(2);
     writer.setMaxBufferedDocs(10);
     ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
 
TestDeletionPolicy.java
@@ -34,6 +34,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 /*
   Verify we can read the pre-2.1 file format, do searches
@@ -201,7 +202,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 
     Directory dir = new RAMDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();
 
@@ -210,7 +211,7 @@ public class TestDeletionPolicy extends LuceneTestCase
       // Record last time when writer performed deletes of
       // past commits
       lastDeleteTime = System.currentTimeMillis();
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -271,7 +272,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     Directory dir = new RAMDirectory();
     policy.dir = dir;
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     writer.setMergeScheduler(new SerialMergeScheduler());
@@ -280,7 +281,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     }
     writer.close();
 
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
@@ -318,7 +319,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     // Open & close a writer and assert that it
     // actually removed something:
     int preCount = dir.listAll().length;
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.close();
     int postCount = dir.listAll().length;
     assertTrue(postCount < preCount);
@@ -340,7 +341,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     Directory dir = new MockRAMDirectory();
     policy.dir = dir;
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     for(int i=0;i<10;i++) {
       addDoc(writer);
@@ -359,7 +360,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -368,7 +369,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertEquals(7, IndexReader.listCommits(dir).size());
 
     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
 
     // Should undo our rollback:
@@ -380,7 +381,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
     writer.close();
@@ -396,7 +397,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     r.close();
 
     // Reoptimize
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();
 
@@ -407,7 +408,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 
     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
 
     // Reader still sees optimized index, because writer
@@ -443,7 +444,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 
     Directory dir = new RAMDirectory();
 
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     for(int i=0;i<107;i++) {
@@ -451,7 +452,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     }
     writer.close();
 
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
@@ -486,7 +487,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     for(int j=0;j<N+1;j++) {
-      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setMaxBufferedDocs(10);
       writer.setUseCompoundFile(useCompoundFile);
       for(int i=0;i<17;i++) {
@@ -541,14 +542,14 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();
     Term searchTerm = new Term("content", "aaa");
     Query query = new TermQuery(searchTerm);
 
     for(int i=0;i<N+1;i++) {
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
         addDoc(writer);
@@ -565,7 +566,7 @@ public class TestDeletionPolicy extends LuceneTestCase
       reader.close();
       searcher.close();
     }
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     // this is a commit
@@ -636,7 +637,7 @@ public class TestDeletionPolicy extends LuceneTestCase
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMaxBufferedDocs(10);
     writer.setUseCompoundFile(useCompoundFile);
     writer.close();
@@ -645,7 +646,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 
     for(int i=0;i<N+1;i++) {
 
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.setMaxBufferedDocs(10);
      writer.setUseCompoundFile(useCompoundFile);
      for(int j=0;j<17;j++) {
@@ -663,7 +664,7 @@ public class TestDeletionPolicy extends LuceneTestCase
       reader.close();
       searcher.close();
 
-      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
       // This will not commit: there are no changes
       // pending because we opened for "create":
       writer.close();
 
TestDoc.java
@@ -35,6 +35,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 
 /** JUnit adaptation of an older test case DocTest. */
@@ -109,7 +110,7 @@ public class TestDoc extends LuceneTestCase {
       PrintWriter out = new PrintWriter(sw, true);
 
       Directory directory = FSDirectory.open(indexDir);
-      IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       SegmentInfo si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);
@@ -137,7 +138,7 @@ public class TestDoc extends LuceneTestCase {
       out = new PrintWriter(sw, true);
 
       directory = FSDirectory.open(indexDir);
-      writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);
 
TestDocumentWriter.java
@@ -39,6 +39,7 @@ import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.AttributeSource;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 
 public class TestDocumentWriter extends LuceneTestCase {
@@ -61,7 +62,7 @@ public class TestDocumentWriter extends LuceneTestCase {
   public void testAddDocument() throws Exception {
     Document testDoc = new Document();
     DocHelper.setupDoc(testDoc);
-    Analyzer analyzer = new WhitespaceAnalyzer();
+    Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(testDoc);
     writer.commit();
@@ -110,7 +111,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     Analyzer analyzer = new Analyzer() {
       @Override
       public TokenStream tokenStream(String fieldName, Reader reader) {
-        return new WhitespaceTokenizer(reader);
+        return new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader);
       }
 
       @Override
@@ -143,7 +144,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     Analyzer analyzer = new Analyzer() {
       @Override
       public TokenStream tokenStream(String fieldName, Reader reader) {
-        return new TokenFilter(new WhitespaceTokenizer(reader)) {
+        return new TokenFilter(new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader)) {
           boolean first=true;
           AttributeSource.State state;
 
@@ -207,7 +208,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 
 
   public void testPreAnalyzedField() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
 
     doc.add(new Field("preanalyzed", new TokenStream() {
 
TestFieldsReader.java
@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.store.FSDirectory;
@@ -50,7 +51,7 @@ public class TestFieldsReader extends LuceneTestCase {
     fieldInfos = new FieldInfos();
     DocHelper.setupDoc(testDoc);
     fieldInfos.add(testDoc);
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(false);
     writer.addDocument(testDoc);
     writer.close();
@@ -211,7 +212,7 @@ public class TestFieldsReader extends LuceneTestCase {
     FSDirectory tmpDir = FSDirectory.open(file);
     assertTrue(tmpDir != null);
 
-    IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setUseCompoundFile(false);
     writer.addDocument(testDoc);
     writer.close();
@@ -392,7 +393,7 @@ public class TestFieldsReader extends LuceneTestCase {
 
     try {
       Directory dir = new FaultyFSDirectory(indexDir);
-      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       for(int i=0;i<2;i++)
         writer.addDocument(testDoc);
       writer.optimize();
 

TestFilterIndexReader.java

@@ -19,6 +19,8 @@ package org.apache.lucene.index;


import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
+
import junit.framework.TestSuite;
import junit.textui.TestRunner;

@@ -97,7 +99,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
 */
public void testFilterIndexReader() throws Exception {
RAMDirectory directory = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);

Document d1 = new Document();

TestIndexFileDeleter.java

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
 */

import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.Directory;
@@ -40,7 +41,7 @@ public class TestIndexFileDeleter extends LuceneTestCase

Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
int i;
for(i=0;i<35;i++) {
@@ -145,7 +146,7 @@ public class TestIndexFileDeleter extends LuceneTestCase

// Open & close a writer: it should delete the above 4
// files and nothing more:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.close();

String[] files2 = dir.listAll();

TestIndexReader.java

@@ -54,6 +54,7 @@ import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.NoSuchDirectoryException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
import org.apache.lucene.util._TestUtil;

public class TestIndexReader extends LuceneTestCase
@@ -313,7 +314,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 100 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm.text());
@@ -355,7 +356,7 @@ public class TestIndexReader extends LuceneTestCase
Directory dir = new RAMDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));
@@ -364,7 +365,7 @@ public class TestIndexReader extends LuceneTestCase
addDocumentWithTermVectorFields(writer);
}
writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("bin1", bin, Field.Store.YES));
doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -401,7 +402,7 @@ public class TestIndexReader extends LuceneTestCase
// force optimize


-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
reader = IndexReader.open(dir, false);
@@ -430,7 +431,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
addDoc(writer, searchTerm.text());
@@ -475,7 +476,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
addDoc(writer, searchTerm.text());
@@ -524,7 +525,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, searchTerm.text());
writer.close();

@@ -569,7 +570,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
addDoc(writer, searchTerm.text());
writer.close();
@@ -623,7 +624,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm2 = new Term("content", "bbb");

// add 100 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm.text());
@@ -639,7 +640,7 @@ public class TestIndexReader extends LuceneTestCase
assertTermDocsCount("first reader", reader, searchTerm2, 0);

// add 100 documents with term : bbb
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm2.text());
@@ -706,7 +707,7 @@ public class TestIndexReader extends LuceneTestCase
// Create initial data set
File dirFile = new File(System.getProperty("tempDir"), "testIndex");
Directory dir = getDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "test");
writer.close();
dir.close();
@@ -716,7 +717,7 @@ public class TestIndexReader extends LuceneTestCase
dir = getDirectory();

// Now create the data set again, just as before
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "test");
writer.close();
dir.close();
@@ -742,7 +743,7 @@ public class TestIndexReader extends LuceneTestCase
else
dir = getDirectory();
assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@@ -759,7 +760,7 @@ public class TestIndexReader extends LuceneTestCase
// incremented:
Thread.sleep(1000);

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@@ -776,7 +777,7 @@ public class TestIndexReader extends LuceneTestCase
public void testVersion() throws IOException {
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@@ -787,7 +788,7 @@ public class TestIndexReader extends LuceneTestCase
reader.close();
// modify index and check version has been
// incremented:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@@ -798,10 +799,10 @@ public class TestIndexReader extends LuceneTestCase

public void testLock() throws IOException {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
IndexReader reader = IndexReader.open(dir, false);
try {
reader.deleteDocument(0);
@@ -818,7 +819,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -835,7 +836,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAllAfterClose() throws IOException {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -852,7 +853,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAllAfterCloseThenReopen() throws IOException {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -890,7 +891,7 @@ public class TestIndexReader extends LuceneTestCase

// First build up a starting index:
RAMDirectory startDir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<157;i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -1080,7 +1081,7 @@ public class TestIndexReader extends LuceneTestCase

public void testDocsOutOfOrderJIRA140() throws IOException {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<11;i++) {
addDoc(writer, "aaa");
}
@@ -1098,7 +1099,7 @@ public class TestIndexReader extends LuceneTestCase
}
reader.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);

// We must add more docs to get a new segment written
for(int i=0;i<11;i++) {
@@ -1120,7 +1121,7 @@ public class TestIndexReader extends LuceneTestCase
public void testExceptionReleaseWriteLockJIRA768() throws IOException {

Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "aaa");
writer.close();

@@ -1196,7 +1197,7 @@ public class TestIndexReader extends LuceneTestCase
// add 100 documents with term : aaa
// add 100 documents with term : bbb
// add 100 documents with term : ccc
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm1.text());
@@ -1606,7 +1607,7 @@ public class TestIndexReader extends LuceneTestCase
// reuse the doc values arrays in FieldCache
public void testFieldCacheReuseAfterClone() throws Exception {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1637,7 +1638,7 @@ public class TestIndexReader extends LuceneTestCase
// FieldCache
public void testFieldCacheReuseAfterReopen() throws Exception {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1669,7 +1670,7 @@ public class TestIndexReader extends LuceneTestCase
// reopen switches readOnly
public void testReopenChangeReadonly() throws Exception {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1710,7 +1711,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1743,7 +1744,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1609: don't load terms index
public void testNoTermsIndex() throws Throwable {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1761,7 +1762,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());

assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(doc);
writer.close();

@@ -1780,7 +1781,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
writer.addDocument(doc);
IndexReader r = IndexReader.open(dir, true);

TestIndexReaderClone.java

@@ -26,6 +26,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

/**
 * Tests cloning multiple types of readers, modifying the deletedDocs and norms
@@ -197,7 +198,7 @@ public class TestIndexReaderClone extends LuceneTestCase {

TestIndexReaderReopen.createIndex(dir1, true);
IndexReader reader1 = IndexReader.open(dir1, false);
-IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.optimize();
w.close();
IndexReader reader2 = reader1.clone(true);
@@ -484,7 +485,7 @@ public class TestIndexReaderClone extends LuceneTestCase {

public void testCloseStoredFields() throws Exception {
final Directory dir = new MockRAMDirectory();
-IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
w.setUseCompoundFile(false);
Document doc = new Document();
doc.add(new Field("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));

TestIndexReaderReopen.java

@@ -47,6 +47,7 @@ import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BitVector;
+import org.apache.lucene.util.Version;

public class TestIndexReaderReopen extends LuceneTestCase {

@@ -946,7 +947,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

public static void createIndex(Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);

w.setMergePolicy(new LogDocMergePolicy(w));

@@ -991,7 +992,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
static void modifyIndex(int i, Directory dir) throws IOException {
switch (i) {
case 0: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.close();
@@ -1006,13 +1007,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 2: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.optimize();
w.close();
break;
}
case 3: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.addDocument(createDocument(101, 4));
w.optimize();
w.addDocument(createDocument(102, 4));
@@ -1028,7 +1029,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 5: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.addDocument(createDocument(101, 4));
w.close();
break;
@@ -1192,7 +1193,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

public void testReopenOnCommit() throws Throwable {
Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
for(int i=0;i<4;i++) {
Document doc = new Document();
doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));
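The TestIndexWriter changes below pass Version.LUCENE_CURRENT as well. That constant always selects the newest match behavior, which suits tests that should track the latest code; an application that needs analysis to stay stable across library upgrades would normally pin a released constant instead. A hedged sketch, not part of the commit (class and field names are illustrative):

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.util.Version;

// Illustrative sketch only; not part of the commit.
public class VersionPinningSketch {
  // Tests deliberately follow the newest behavior:
  static final SimpleAnalyzer FOR_TESTS = new SimpleAnalyzer(Version.LUCENE_CURRENT);

  // Applications would typically pin the version they were built against,
  // so a later upgrade cannot silently change tokenization:
  static final SimpleAnalyzer PINNED = new SimpleAnalyzer(Version.LUCENE_30);
}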

TestIndexWriter.java

@@ -86,7 +86,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter.setDefaultWriteLockTimeout(2000);
assertEquals(2000, IndexWriter.getDefaultWriteLockTimeout());

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);

IndexWriter.setDefaultWriteLockTimeout(1000);

@@ -105,7 +105,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();

// test doc count before segments are merged/index is optimized
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
assertEquals(100, writer.maxDoc());
writer.close();

@@ -115,7 +115,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();

// optimize the index and check that the new doc count is correct
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
assertEquals(100, writer.maxDoc());
assertEquals(60, writer.numDocs());
writer.optimize();
@@ -131,7 +131,7 @@ public class TestIndexWriter extends LuceneTestCase {

// make sure opening a new index for create over
// this existing one works correctly:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
assertEquals(0, writer.maxDoc());
assertEquals(0, writer.numDocs());
writer.close();
@@ -174,7 +174,7 @@ public class TestIndexWriter extends LuceneTestCase {
long inputDiskUsage = 0;
for(int i=0;i<NUM_DIR;i++) {
dirs[i] = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dirs[i], new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dirs[i], new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int j=0;j<25;j++) {
addDocWithIndex(writer, 25*i+j);
}
@@ -188,7 +188,7 @@ public class TestIndexWriter extends LuceneTestCase {
// Now, build a starting index that has START_COUNT docs. We
// will then try to addIndexesNoOptimize into a copy of this:
RAMDirectory startDir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int j=0;j<START_COUNT;j++) {
addDocWithIndex(writer, j);
}
@@ -251,7 +251,7 @@ public class TestIndexWriter extends LuceneTestCase {

// Make a new dir that will enforce disk usage:
MockRAMDirectory dir = new MockRAMDirectory(startDir);
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
IOException err = null;

MergeScheduler ms = writer.getMergeScheduler();
@@ -463,7 +463,7 @@ public class TestIndexWriter extends LuceneTestCase {
System.out.println("TEST: cycle: diskFree=" + diskFree);
MockRAMDirectory dir = new MockRAMDirectory();
dir.setMaxSizeInBytes(diskFree);
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

MergeScheduler ms = writer.getMergeScheduler();
if (ms instanceof ConcurrentMergeScheduler)
@@ -605,7 +605,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));

for(int numDocs=38;numDocs<500;numDocs += 38) {
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
LogDocMergePolicy ldmp = new LogDocMergePolicy(writer);
ldmp.setMinMergeDocs(1);
writer.setMergePolicy(ldmp);
@@ -619,7 +619,7 @@ public class TestIndexWriter extends LuceneTestCase {
sis.read(dir);
final int segCount = sis.size();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setMergePolicy(ldmp);
writer.setMergeFactor(5);
writer.optimize(3);
@@ -642,7 +642,7 @@ public class TestIndexWriter extends LuceneTestCase {
final Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
LogDocMergePolicy ldmp = new LogDocMergePolicy(writer);
ldmp.setMinMergeDocs(1);
writer.setMergePolicy(ldmp);
@@ -684,7 +684,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testOptimizeTempSpaceUsage() throws IOException {

MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int j=0;j<500;j++) {
addDocWithIndex(writer, j);
}
@@ -697,7 +697,7 @@ public class TestIndexWriter extends LuceneTestCase {
}

dir.resetMaxUsedSizeInBytes();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
long maxDiskUsage = dir.getMaxUsedSizeInBytes();
@@ -728,7 +728,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = FSDirectory.open(indexDir);

// add one document & close writer
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer);
writer.close();

@@ -737,7 +737,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("should be one document", reader.numDocs(), 1);

// now open index for create:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
assertEquals("should be zero documents", writer.maxDoc(), 0);
addDoc(writer);
writer.close();
@@ -761,7 +761,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexWriter writer = null;

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

// add 100 documents
for (int i = 0; i < 100; i++) {
@@ -799,7 +799,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();

try {
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
} catch (Exception e) {
fail("writer failed to open on a crashed index");
}
@@ -821,7 +821,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexWriter writer = null;

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

// add 100 documents
for (int i = 0; i < 100; i++) {
@@ -864,7 +864,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexWriter writer = null;

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer);

// close
@@ -886,7 +886,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexWriter writer = null;

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

// add 100 documents
for (int i = 0; i < 100; i++) {
@@ -925,7 +925,7 @@ public class TestIndexWriter extends LuceneTestCase {
 */
public void testCommitOnClose() throws IOException {
Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 14; i++) {
addDoc(writer);
}
@@ -939,7 +939,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexReader reader = IndexReader.open(dir, true);

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<3;i++) {
for(int j=0;j<11;j++) {
addDoc(writer);
@@ -971,7 +971,7 @@ public class TestIndexWriter extends LuceneTestCase {
 */
public void testCommitOnCloseAbort() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
for (int i = 0; i < 14; i++) {
addDoc(writer);
@@ -984,7 +984,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("first number of hits", 14, hits.length);
searcher.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
for(int j=0;j<17;j++) {
addDoc(writer);
@@ -1009,7 +1009,7 @@ public class TestIndexWriter extends LuceneTestCase {

// Now make sure we can re-open the index, add docs,
// and all is good:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);

// On abort, writer in fact may write to the same
@@ -1044,7 +1044,7 @@ public class TestIndexWriter extends LuceneTestCase {
 */
public void testCommitOnCloseDiskUsage() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int j=0;j<30;j++) {
addDocWithIndex(writer, j);
}
@@ -1052,7 +1052,7 @@ public class TestIndexWriter extends LuceneTestCase {
dir.resetMaxUsedSizeInBytes();

long startDiskUsage = dir.getMaxUsedSizeInBytes();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeScheduler(new SerialMergeScheduler());
for(int j=0;j<1470;j++) {
@@ -1087,14 +1087,14 @@ public class TestIndexWriter extends LuceneTestCase {
 */
public void testCommitOnCloseOptimize() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
for(int j=0;j<17;j++) {
addDocWithIndex(writer, j);
}
writer.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();

// Open a reader before closing (commiting) the writer:
@@ -1116,7 +1116,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertFalse("Reader incorrectly sees that the index is optimized", reader.isOptimized());
reader.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
assertNoUnreferencedFiles(dir, "aborted writer after optimize");
@@ -1131,7 +1131,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testIndexNoDocuments() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.commit();
writer.close();

@@ -1140,7 +1140,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(0, reader.numDocs());
reader.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.commit();
writer.close();

@@ -1152,7 +1152,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testManyFields() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
for(int j=0;j<100;j++) {
Document doc = new Document();
@@ -1183,7 +1183,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testSmallRAMBuffer() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setRAMBufferSizeMB(0.000001);
int lastNumFile = dir.listAll().length;
for(int j=0;j<9;j++) {
@@ -1204,7 +1204,7 @@ public class TestIndexWriter extends LuceneTestCase {
// maxBufferedDocs in a write session
public void testChangingRAMBuffer() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);

@@ -1258,7 +1258,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testChangingRAMBuffer2() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMaxBufferedDeleteTerms(10);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -1318,7 +1318,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testDiverseDocs() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setRAMBufferSizeMB(0.5);
Random rand = newRandom();
for(int i=0;i<3;i++) {
@@ -1367,7 +1367,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testEnablingNorms() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
// Enable norms for only 1 doc, pre flush
for(int j=0;j<10;j++) {
@@ -1388,7 +1388,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(10, hits.length);
searcher.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
// Enable norms for only 1 doc, post flush
for(int j=0;j<27;j++) {
@@ -1414,7 +1414,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testHighFreqTerm() throws IOException {
RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, new IndexWriter.MaxFieldLength(100000000));
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, new IndexWriter.MaxFieldLength(100000000));
writer.setRAMBufferSizeMB(0.01);
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024*1024);
@@ -1461,7 +1461,7 @@ public class TestIndexWriter extends LuceneTestCase {
}

Directory dir = new MyRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++) {
addDoc(writer);
}
@@ -1472,7 +1472,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("did not get right number of hits", 100, hits.length);
writer.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.close();

dir.close();
@@ -1480,7 +1480,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testFlushWithNoMerging() throws IOException {
Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -1499,7 +1499,7 @@ public class TestIndexWriter extends LuceneTestCase {
// empty doc (no norms) and flush
public void testEmptyDocAfterFlushingRealDoc() throws IOException {
Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
writer.addDocument(doc);
@@ -1518,7 +1518,7 @@ public class TestIndexWriter extends LuceneTestCase {

Directory dir = new MockRAMDirectory();
for(int pass=0;pass<2;pass++) {
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeScheduler(new ConcurrentMergeScheduler());
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -1801,7 +1801,7 @@ public class TestIndexWriter extends LuceneTestCase {
failure.setDoFail();
dir.failOn(failure);

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -1851,7 +1851,7 @@ public class TestIndexWriter extends LuceneTestCase {
Analyzer analyzer = new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
-return new CrashingFilter(fieldName, new WhitespaceTokenizer(reader));
+return new CrashingFilter(fieldName, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader));
}
};

@@ -1934,7 +1934,7 @@ public class TestIndexWriter extends LuceneTestCase {
Analyzer analyzer = new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
-return new CrashingFilter(fieldName, new WhitespaceTokenizer(reader));
+return new CrashingFilter(fieldName, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader));
}
};

@@ -2048,7 +2048,7 @@ public class TestIndexWriter extends LuceneTestCase {
MockRAMDirectory dir = new MockRAMDirectory();
int delID = 0;
for(int i=0;i<20;i++) {
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(2);
writer.setUseCompoundFile(false);
@@ -2084,7 +2084,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();

if (0 == i % 4) {
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
writer.optimize();
writer.close();
@@ -2101,7 +2101,7 @@ public class TestIndexWriter extends LuceneTestCase {

for(int pass=0;pass<2;pass++) {

-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

//System.out.println("TEST: pass=" + pass + " cms=" + (pass >= 2));
for(int iter=0;iter<10;iter++) {
@@ -2173,7 +2173,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();

// Reopen
-writer = new IndexWriter(directory, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
}
writer.close();
}
@@ -2253,7 +2253,7 @@ public class TestIndexWriter extends LuceneTestCase {

for(int iter=0;iter<7;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

// We expect AlreadyClosedException
@@ -2312,7 +2312,7 @@ public class TestIndexWriter extends LuceneTestCase {
// OK:
public void testImmediateDiskFull() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
dir.setMaxSizeInBytes(dir.getRecomputedActualSizeInBytes());
writer.setMaxBufferedDocs(2);
final Document doc = new Document();
@@ -2350,7 +2350,7 @@ public class TestIndexWriter extends LuceneTestCase {

for(int iter=0;iter<10;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
// We expect disk full exceptions in the merge threads
cms.setSuppressExceptions();
@@ -2411,7 +2411,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void _testSingleThreadFailure(MockRAMDirectory.Failure failure) throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(2);
final Document doc = new Document();
doc.add(new Field("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -2441,7 +2441,7 @@ public class TestIndexWriter extends LuceneTestCase {

for(int iter=0;iter<2;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
// We expect disk full exceptions in the merge threads
cms.setSuppressExceptions();
@@ -2601,7 +2601,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testUnlimitedMaxFieldLength() throws IOException {
Directory dir = new MockRAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);

Document doc = new Document();
StringBuilder b = new StringBuilder();
@@ -2625,7 +2625,7 @@ public class TestIndexWriter extends LuceneTestCase {

IndexWriter writer = null;

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

// add 100 documents
for (int i = 0; i < 100; i++) {
@@ -2661,7 +2661,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testForceCommit() throws IOException {
Directory dir = new MockRAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(5);

@@ -2715,7 +2715,7 @@ public class TestIndexWriter extends LuceneTestCase {
FailOnlyInSync failure = new FailOnlyInSync();
dir.failOn(failure);

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
failure.setDoFail();

ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
@@ -2887,7 +2887,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testUserSpecifiedMaxFieldLength() throws IOException {
Directory dir = new MockRAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), new IndexWriter.MaxFieldLength(100000));
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), new IndexWriter.MaxFieldLength(100000));

Document doc = new Document();
StringBuilder b = new StringBuilder();
@@ -3048,7 +3048,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1179
public void testEmptyFieldName() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("", "a b c", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);
@@ -3074,7 +3074,7 @@ public class TestIndexWriter extends LuceneTestCase {

public void testExceptionDocumentsWriterInit() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-MockIndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+MockIndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("field", "a field", Field.Store.YES,
Field.Index.ANALYZED));
@@ -3094,7 +3094,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1208
public void testExceptionJustBeforeFlush() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
-MockIndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+MockIndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
w.setMaxBufferedDocs(2);
Document doc = new Document();
doc.add(new Field("field", "a field", Field.Store.YES,
@@ -3104,7 +3104,7 @@ public class TestIndexWriter extends LuceneTestCase {
Analyzer analyzer = new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
|
||||
return new CrashingFilter(fieldName, new WhitespaceTokenizer(reader));
|
||||
return new CrashingFilter(fieldName, new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -3144,7 +3144,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1210
|
||||
public void testExceptionOnMergeInit() throws IOException {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
MockIndexWriter2 w = new MockIndexWriter2(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
MockIndexWriter2 w = new MockIndexWriter2(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
w.setMaxBufferedDocs(2);
|
||||
w.setMergeFactor(2);
|
||||
w.doFail = true;
|
||||
|
@ -3182,7 +3182,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1222
|
||||
public void testDoAfterFlush() throws IOException {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
MockIndexWriter3 w = new MockIndexWriter3(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
MockIndexWriter3 w = new MockIndexWriter3(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", "a field", Field.Store.YES,
|
||||
Field.Index.ANALYZED));
|
||||
|
@ -3235,7 +3235,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testExceptionsDuringCommit() throws Throwable {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
FailOnlyInCommit failure = new FailOnlyInCommit();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", "a field", Field.Store.YES,
|
||||
Field.Index.ANALYZED));
|
||||
|
@ -3283,7 +3283,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-510
|
||||
public void testInvalidUTF16() throws Throwable {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
Document doc = new Document();
|
||||
|
||||
final int count = utf8Data.length/2;
|
||||
|
@ -3496,7 +3496,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
};
|
||||
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", tokens));
|
||||
w.addDocument(doc);
|
||||
|
@ -3528,7 +3528,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testPrepareCommit() throws IOException {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer.setMaxBufferedDocs(2);
|
||||
writer.setMergeFactor(5);
|
||||
|
||||
|
@ -3580,7 +3580,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
dir.setPreventDoubleWrite(false);
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
|
||||
writer.setMaxBufferedDocs(2);
|
||||
writer.setMergeFactor(5);
|
||||
|
@ -3605,7 +3605,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
reader.close();
|
||||
reader2.close();
|
||||
|
||||
writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
for (int i = 0; i < 17; i++)
|
||||
addDoc(writer);
|
||||
|
||||
|
@ -3633,7 +3633,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testPrepareCommitNoChanges() throws IOException {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer.prepareCommit();
|
||||
writer.commit();
|
||||
writer.close();
|
||||
|
@ -3660,14 +3660,14 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public RunAddIndexesThreads(int numCopy) throws Throwable {
|
||||
NUM_COPY = numCopy;
|
||||
dir = new MockRAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer.setMaxBufferedDocs(2);
|
||||
for (int i = 0; i < NUM_INIT_DOCS; i++)
|
||||
addDoc(writer);
|
||||
writer.close();
|
||||
|
||||
dir2 = new MockRAMDirectory();
|
||||
writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
cms = (ConcurrentMergeScheduler) writer2.getMergeScheduler();
|
||||
|
||||
readers = new IndexReader[NUM_COPY];
|
||||
|
@ -3932,7 +3932,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1347
|
||||
public void testRollbackExceptionHang() throws Throwable {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
MockIndexWriter4 w = new MockIndexWriter4(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
MockIndexWriter4 w = new MockIndexWriter4(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
|
||||
addDoc(w);
|
||||
w.doFail = true;
|
||||
|
@ -3951,7 +3951,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1219
|
||||
public void testBinaryFieldOffsetLength() throws IOException {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
byte[] b = new byte[50];
|
||||
for(int i=0;i<50;i++)
|
||||
b[i] = (byte) (i+77);
|
||||
|
@ -3981,7 +3981,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1382
|
||||
public void testCommitUserData() throws IOException {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
w.setMaxBufferedDocs(2);
|
||||
for(int j=0;j<17;j++)
|
||||
addDoc(w);
|
||||
|
@ -3994,7 +3994,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals(0, r.getCommitUserData().size());
|
||||
r.close();
|
||||
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
w.setMaxBufferedDocs(2);
|
||||
for(int j=0;j<17;j++)
|
||||
addDoc(w);
|
||||
|
@ -4009,7 +4009,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
assertEquals("test1", r.getCommitUserData().get("label"));
|
||||
r.close();
|
||||
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
w.optimize();
|
||||
w.close();
|
||||
|
||||
|
@ -4020,7 +4020,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testOptimizeExceptions() throws IOException {
|
||||
RAMDirectory startDir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(startDir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(startDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
w.setMaxBufferedDocs(2);
|
||||
w.setMergeFactor(100);
|
||||
for(int i=0;i<27;i++)
|
||||
|
@ -4029,7 +4029,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
for(int i=0;i<200;i++) {
|
||||
MockRAMDirectory dir = new MockRAMDirectory(startDir);
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
((ConcurrentMergeScheduler) w.getMergeScheduler()).setSuppressExceptions();
|
||||
dir.setRandomIOExceptionRate(0.5, 100);
|
||||
try {
|
||||
|
@ -4073,7 +4073,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1442
|
||||
public void testDoubleOffsetCounting() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
|
||||
doc.add(f);
|
||||
|
@ -4108,7 +4108,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1442
|
||||
public void testDoubleOffsetCounting2() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
|
||||
doc.add(f);
|
||||
|
@ -4130,7 +4130,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1448
|
||||
public void testEndOffsetPositionCharAnalyzer() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
Field f = new Field("field", "abcd ", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
|
||||
doc.add(f);
|
||||
|
@ -4152,7 +4152,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1448
|
||||
public void testEndOffsetPositionWithCachingTokenFilter() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
Analyzer analyzer = new WhitespaceAnalyzer();
|
||||
Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
|
||||
IndexWriter w = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
TokenStream stream = new CachingTokenFilter(analyzer.tokenStream("field", new StringReader("abcd ")));
|
||||
|
@ -4176,7 +4176,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1448
|
||||
public void testEndOffsetPositionWithTeeSinkTokenFilter() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
Analyzer analyzer = new WhitespaceAnalyzer();
|
||||
Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
|
||||
IndexWriter w = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
TeeSinkTokenFilter tee = new TeeSinkTokenFilter(analyzer.tokenStream("field", new StringReader("abcd ")));
|
||||
|
@ -4323,7 +4323,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
out.writeByte((byte) 42);
|
||||
out.close();
|
||||
|
||||
new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED).close();
|
||||
new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED).close();
|
||||
|
||||
assertTrue(dir.fileExists("myrandomfile"));
|
||||
|
||||
|
@ -4339,7 +4339,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testDeadlock() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
writer.setMaxBufferedDocs(2);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
|
||||
|
@ -4351,7 +4351,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// index has 2 segments
|
||||
|
||||
MockRAMDirectory dir2 = new MockRAMDirectory();
|
||||
IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
|
||||
IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
writer2.addDocument(doc);
|
||||
writer2.close();
|
||||
|
||||
|
@ -4389,7 +4389,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
if (w != null) {
|
||||
w.close();
|
||||
}
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
|
||||
//((ConcurrentMergeScheduler) w.getMergeScheduler()).setSuppressExceptions();
|
||||
if (!first && !allowInterrupt) {
|
||||
|
@ -4498,30 +4498,30 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testIndexStoreCombos() throws Exception {
|
||||
MockRAMDirectory dir = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
byte[] b = new byte[50];
|
||||
for(int i=0;i<50;i++)
|
||||
b[i] = (byte) (i+77);
|
||||
|
||||
Document doc = new Document();
|
||||
Field f = new Field("binary", b, 10, 17, Field.Store.YES);
|
||||
f.setTokenStream(new WhitespaceTokenizer(new StringReader("doc1field1")));
|
||||
f.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc1field1")));
|
||||
Field f2 = new Field("string", "value", Field.Store.YES,Field.Index.ANALYZED);
|
||||
f2.setTokenStream(new WhitespaceTokenizer(new StringReader("doc1field2")));
|
||||
f2.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc1field2")));
|
||||
doc.add(f);
|
||||
doc.add(f2);
|
||||
w.addDocument(doc);
|
||||
|
||||
// add 2 docs to test in-memory merging
|
||||
f.setTokenStream(new WhitespaceTokenizer(new StringReader("doc2field1")));
|
||||
f2.setTokenStream(new WhitespaceTokenizer(new StringReader("doc2field2")));
|
||||
f.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc2field1")));
|
||||
f2.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc2field2")));
|
||||
w.addDocument(doc);
|
||||
|
||||
// force segment flush so we can force a segment merge with doc3 later.
|
||||
w.commit();
|
||||
|
||||
f.setTokenStream(new WhitespaceTokenizer(new StringReader("doc3field1")));
|
||||
f2.setTokenStream(new WhitespaceTokenizer(new StringReader("doc3field2")));
|
||||
f.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc3field1")));
|
||||
f2.setTokenStream(new WhitespaceTokenizer(Version.LUCENE_CURRENT, new StringReader("doc3field2")));
|
||||
|
||||
w.addDocument(doc);
|
||||
w.commit();
|
||||
|
@ -4560,7 +4560,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
// LUCENE-1727: make sure doc fields are stored in order
|
||||
public void testStoredFieldsOrder() throws Throwable {
|
||||
Directory d = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("zzz", "a b c", Field.Store.YES, Field.Index.NO));
|
||||
doc.add(new Field("aaa", "a b c", Field.Store.YES, Field.Index.NO));
|
||||
|
@ -4592,7 +4592,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
public void testEmbeddedFFFF() throws Throwable {
|
||||
|
||||
Directory d = new MockRAMDirectory();
|
||||
IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
|
||||
w.addDocument(doc);
|
||||
|
@ -4607,7 +4607,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
|
||||
public void testNoDocsIndex() throws Throwable {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
writer.setUseCompoundFile(false);
|
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
|
||||
writer.setInfoStream(new PrintStream(bos));
|
||||
|
@ -4625,7 +4625,7 @@ public class TestIndexWriter extends LuceneTestCase {
|
|||
final int NUM_THREADS = 5;
|
||||
final double RUN_SEC = 0.5;
|
||||
final Directory dir = new MockRAMDirectory();
|
||||
final IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
final IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
w.commit();
|
||||
final AtomicBoolean failed = new AtomicBoolean();
|
||||
Thread[] threads = new Thread[NUM_THREADS];
|
||||
|
|
|
@@ -29,6 +29,7 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 public class TestIndexWriterDelete extends LuceneTestCase {

@@ -42,7 +43,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setUseCompoundFile(true);
 modifier.setMaxBufferedDeleteTerms(1);

@@ -79,7 +80,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -114,7 +115,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testMaxBufferedDeletes() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter writer = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 writer.setMaxBufferedDeleteTerms(1);
 writer.deleteDocuments(new Term("foobar", "1"));
 writer.deleteDocuments(new Term("foobar", "1"));
@@ -129,7 +130,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 for(int t=0;t<2;t++) {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(4);
 modifier.setMaxBufferedDeleteTerms(4);

@@ -171,7 +172,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testBothDeletes() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(100);
 modifier.setMaxBufferedDeleteTerms(100);

@@ -204,7 +205,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testBatchDeletes() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -248,7 +249,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testDeleteAll() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -295,7 +296,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testDeleteAllRollback() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -333,7 +334,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 public void testDeleteAllNRT() throws IOException {
 Directory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -425,7 +426,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 // First build up a starting index:
 MockRAMDirectory startDir = new MockRAMDirectory();
 IndexWriter writer = new IndexWriter(startDir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 for (int i = 0; i < 157; i++) {
 Document d = new Document();
 d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@@ -448,7 +449,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 MockRAMDirectory dir = new MockRAMDirectory(startDir);
 dir.setPreventDoubleWrite(false);
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);

 modifier.setMaxBufferedDocs(1000); // use flush or close
 modifier.setMaxBufferedDeleteTerms(1000); // use flush or close
@@ -654,7 +655,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

 MockRAMDirectory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 modifier.setUseCompoundFile(true);
 modifier.setMaxBufferedDeleteTerms(2);

@@ -763,7 +764,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {

 MockRAMDirectory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir,
-new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

 dir.failOn(failure.reset());

@@ -21,6 +21,7 @@ import java.util.Random;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
@@ -134,7 +135,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 public void testRandomExceptions() throws Throwable {
 MockRAMDirectory dir = new MockRAMDirectory();

-MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
 //writer.setMaxBufferedDocs(10);
 writer.setRAMBufferSizeMB(0.1);
@@ -172,7 +173,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 public void testRandomExceptionsThreads() throws Throwable {

 MockRAMDirectory dir = new MockRAMDirectory();
-MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
 //writer.setMaxBufferedDocs(10);
 writer.setRAMBufferSizeMB(0.2);

@@ -24,6 +24,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 import org.apache.lucene.util.LuceneTestCase;
@@ -34,7 +35,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testNormalCase() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -51,7 +52,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testNoOverMerge() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -73,7 +74,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testForceFlush() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 LogDocMergePolicy mp = new LogDocMergePolicy(writer);
@@ -84,7 +85,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 addDoc(writer);
 writer.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergePolicy(mp);
 mp.setMinMergeDocs(100);
@@ -99,7 +100,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMergeFactorChange() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(100);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -125,7 +126,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMaxBufferedDocsChange() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 writer.setMaxBufferedDocs(101);
 writer.setMergeFactor(101);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -139,7 +140,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 }
 writer.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
 writer.setMaxBufferedDocs(101);
 writer.setMergeFactor(101);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
@@ -170,7 +171,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMergeDocCount0() throws IOException {
 Directory dir = new RAMDirectory();

-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(100);
@@ -185,7 +186,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 reader.deleteDocuments(new Term("content", "aaa"));
 reader.close();

-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
 writer.setMergePolicy(new LogDocMergePolicy(writer));
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(5);

@@ -37,6 +37,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.ThreadInterruptedException;

@@ -76,7 +77,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 boolean optimize = true;

 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);

 // create the index
@@ -111,7 +112,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 assertEquals(0, count(new Term("id", id10), r3));
 assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));

-writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
@@ -139,7 +140,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 boolean optimize = false;

 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 // create the index
@@ -148,7 +149,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

 // create a 2nd index
 Directory dir2 = new MockRAMDirectory();
-IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(),
+IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer2.setInfoStream(infoStream);
 createIndexNoClose(!optimize, "index2", writer2);
@@ -186,13 +187,13 @@ public class TestIndexWriterReader extends LuceneTestCase {
 boolean optimize = false;

 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);

 // create a 2nd index
 Directory dir2 = new MockRAMDirectory();
-IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(),
+IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer2.setInfoStream(infoStream);
 createIndexNoClose(!optimize, "index2", writer2);
@@ -221,7 +222,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 boolean optimize = true;

 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 // create the index
@@ -260,7 +261,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 writer.close();

 // reopen the writer to verify the delete made it to the directory
-writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 IndexReader w2r1 = writer.getReader();
@@ -275,7 +276,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 int numDirs = 3;

 Directory mainDir = new MockRAMDirectory();
-IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
+IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 mainWriter.setInfoStream(infoStream);
 AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
@@ -383,7 +384,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 this.numDirs = numDirs;
 this.mainWriter = mainWriter;
 addDir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(2);
 for (int i = 0; i < NUM_INIT_DOCS; i++) {
@@ -491,7 +492,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 */
 public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 IndexReader r1 = writer.getReader();
@@ -529,7 +530,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 writer.close();

 // test whether the changes made it to the directory
-writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 IndexReader w2r1 = writer.getReader();
 // insure the deletes were actually flushed to the directory
@@ -570,7 +571,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 */
 public static void createIndex(Directory dir1, String indexName,
 boolean multiSegment) throws IOException {
-IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 w.setMergePolicy(new LogDocMergePolicy(w));
 for (int i = 0; i < 100; i++) {
@@ -605,7 +606,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 public void testMergeWarmer() throws Exception {

 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);

@@ -640,7 +641,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

 public void testAfterCommit() throws Exception {
 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);

@@ -673,7 +674,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 // Make sure reader remains usable even if IndexWriter closes
 public void testAfterClose() throws Exception {
 Directory dir1 = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);

@@ -703,7 +704,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 // Stress test reopen during addIndexes
 public void testDuringAddIndexes() throws Exception {
 Directory dir1 = new MockRAMDirectory();
-final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 writer.setMergeFactor(2);
@@ -781,7 +782,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
 // Stress test reopen during add/delete
 public void testDuringAddDelete() throws Exception {
 Directory dir1 = new MockRAMDirectory();
-final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
+final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setInfoStream(infoStream);
 writer.setMergeFactor(2);
@@ -862,7 +863,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

 public void testExpungeDeletes() throws Throwable {
 Directory dir = new MockRAMDirectory();
-final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
+final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
@@ -887,7 +888,7 @@ public class TestIndexWriterReader extends LuceneTestCase {

 public void testDeletesNumDocs() throws Throwable {
 Directory dir = new MockRAMDirectory();
-final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
+final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
 */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
@@ -63,7 +64,7 @@ public class TestLazyBug extends LuceneTestCase {
 Directory dir = new RAMDirectory();
 try {
 Random r = newRandom();
-Analyzer analyzer = new SimpleAnalyzer();
+Analyzer analyzer = new SimpleAnalyzer(Version.LUCENE_CURRENT);
 IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

 writer.setUseCompoundFile(false);

@@ -30,6 +30,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 /**
 * Tests lazy skipping on the proximity file.
@@ -60,7 +61,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
 int numDocs = 500;

 Directory directory = new SeekCountingDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 writer.setMaxBufferedDocs(10);
 for (int i = 0; i < numDocs; i++) {
@@ -118,7 +119,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {

 public void testSeek() throws IOException {
 Directory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 10; i++) {
 Document doc = new Document();
 doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));

@@ -32,6 +32,7 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 /**
 * This testcase tests whether multi-level skipping is being used
@@ -91,7 +92,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
 private static class PayloadAnalyzer extends Analyzer {
 @Override
 public TokenStream tokenStream(String fieldName, Reader reader) {
-return new PayloadFilter(new LowerCaseTokenizer(reader));
+return new PayloadFilter(new LowerCaseTokenizer(Version.LUCENE_CURRENT, reader));
 }

 }

@@ -25,6 +25,7 @@ import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 public class TestNRTReaderWithThreads extends LuceneTestCase {
 Random random = new Random();
@@ -32,7 +33,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {

 public void testIndexing() throws Exception {
 Directory mainDir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
+IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
 IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 IndexReader reader = writer.getReader(); // start pooling readers

@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 import org.apache.lucene.analysis.SimpleAnalyzer;
@@ -47,7 +48,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 */
 public void testEmptyIndex() throws IOException {
 RAMDirectory rd1 = new MockRAMDirectory();
-IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(), true,
+IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
 MaxFieldLength.UNLIMITED);
 iw.close();

@@ -55,7 +56,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {

 RAMDirectory rdOut = new MockRAMDirectory();

-IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(), true,
+IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
 MaxFieldLength.UNLIMITED);
 ParallelReader pr = new ParallelReader();
 pr.add(IndexReader.open(rd1,true));
@@ -80,7 +81,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 public void testEmptyIndexWithVectors() throws IOException {
 RAMDirectory rd1 = new MockRAMDirectory();
 {
-IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(), true,
+IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
 MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 doc.add(new Field("test", "", Store.NO, Index.ANALYZED,
@@ -95,7 +96,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
 ir.deleteDocument(0);
 ir.close();

-iw = new IndexWriter(rd1, new SimpleAnalyzer(), false,
+iw = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), false,
 MaxFieldLength.UNLIMITED);
 iw.optimize();
 iw.close();
@@ -103,7 +104,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {

 RAMDirectory rd2 = new MockRAMDirectory();
 {
-IndexWriter iw = new IndexWriter(rd2, new SimpleAnalyzer(), true,
+IndexWriter iw = new IndexWriter(rd2, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
 MaxFieldLength.UNLIMITED);
 Document doc = new Document();
 iw.addDocument(doc);
@@ -112,7 +113,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {

 RAMDirectory rdOut = new MockRAMDirectory();

-IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(), true,
+IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
 MaxFieldLength.UNLIMITED);
 ParallelReader pr = new ParallelReader();
 pr.add(IndexReader.open(rd1,true));

@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
@@ -38,7 +39,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
 Document doc;

 RAMDirectory rd1 = new RAMDirectory();
-IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 doc = new Document();
 doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@@ -50,7 +51,7 @@ public class TestParallelTermEnum extends LuceneTestCase {

 iw1.close();
 RAMDirectory rd2 = new RAMDirectory();
-IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

 doc = new Document();
 doc.add(new Field("field0", "", Store.NO, Index.ANALYZED));

@@ -41,6 +41,7 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.UnicodeUtil;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;


@@ -395,7 +396,7 @@ public class TestPayloads extends LuceneTestCase {
 @Override
 public TokenStream tokenStream(String fieldName, Reader reader) {
 PayloadData payload = fieldToData.get(fieldName);
-TokenStream ts = new WhitespaceTokenizer(reader);
+TokenStream ts = new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader);
 if (payload != null) {
 if (payload.numFieldInstancesToSkip == 0) {
 ts = new PayloadFilter(ts, payload.data, payload.offset, payload.length);
@@ -468,7 +469,7 @@ public class TestPayloads extends LuceneTestCase {
 final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);

 Directory dir = new RAMDirectory();
-final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
 final String field = "test";

 Thread[] ingesters = new Thread[numThreads];

@@ -18,6 +18,7 @@ package org.apache.lucene.index;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;

@@ -102,7 +103,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {

   public void testSkipTo(int indexDivisor) throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     Term ta = new Term("content","aaa");
@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;

@@ -37,7 +38,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
   {
     IndexWriter writer = null;

-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     // ADD 100 documents with term : aaa
     // add 100 documents with terms: aaa bbb

@@ -53,7 +54,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
     verifyDocFreq();

     // merge segments by optimizing the index
-    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();

@@ -64,7 +65,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
   public void testPrevTermAtEnd() throws IOException
   {
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer, "aaa bbb");
     writer.close();
     SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);
@@ -26,7 +26,7 @@ import java.util.Random;
 import java.io.File;

 public class TestStressIndexing extends LuceneTestCase {
-  private static final Analyzer ANALYZER = new SimpleAnalyzer();
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);
   private Random RANDOM;

   private static abstract class TimedThread extends Thread {
@@ -19,6 +19,7 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.analysis.*;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.StringHelper;
 import org.apache.lucene.search.TermQuery;

@@ -123,7 +124,7 @@ public class TestStressIndexing2 extends LuceneTestCase {

   public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
-    IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     w.setUseCompoundFile(false);

     /***

@@ -175,7 +176,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
   public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
     Map<String,Document> docs = new HashMap<String,Document>();
     for(int iter=0;iter<3;iter++) {
-      IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
       w.setUseCompoundFile(false);

       // force many merges

@@ -218,7 +219,7 @@ public class TestStressIndexing2 extends LuceneTestCase {


   public static void indexSerial(Map<String,Document> docs, Directory dir) throws IOException {
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);

     // index all docs in a single thread
     Iterator<Document> iter = docs.values().iterator();
@@ -24,6 +24,7 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.util.English;

@@ -34,7 +35,7 @@ import java.io.File;

 public class TestThreadedOptimize extends LuceneTestCase {

-  private static final Analyzer ANALYZER = new SimpleAnalyzer();
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(Version.LUCENE_CURRENT);

   private final static int NUM_THREADS = 3;
   //private final static int NUM_THREADS = 5;
@@ -27,6 +27,7 @@ import java.util.Map;
 import java.util.HashMap;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;

@@ -66,7 +67,7 @@ public class TestTransactionRollback extends LuceneTestCase {
     if (last==null)
       throw new RuntimeException("Couldn't find commit point "+id);

-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         new RollbackDeletionPolicy(id), MaxFieldLength.UNLIMITED, last);
     Map<String,String> data = new HashMap<String,String>();
     data.put("index", "Rolled back to 1-"+id);

@@ -128,7 +129,7 @@ public class TestTransactionRollback extends LuceneTestCase {

     //Build index, of records 1 to 100, committing after each batch of 10
     IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
-    IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(),sdp,MaxFieldLength.UNLIMITED);
+    IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(Version.LUCENE_CURRENT),sdp,MaxFieldLength.UNLIMITED);
     for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
       Document doc=new Document();
       doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));

@@ -196,7 +197,7 @@ public class TestTransactionRollback extends LuceneTestCase {
     for(int i=0;i<2;i++) {
       // Unless you specify a prior commit point, rollback
       // should not work:
-      new IndexWriter(dir,new WhitespaceAnalyzer(),
+      new IndexWriter(dir,new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
           new DeleteLastCommitPolicy(),
           MaxFieldLength.UNLIMITED).close();
       IndexReader r = IndexReader.open(dir, true);
@@ -88,12 +88,12 @@ public class TestTransactions extends LuceneTestCase
     @Override
     public void doWork() throws Throwable {

-      IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
       writer1.setMaxBufferedDocs(3);
       writer1.setMergeFactor(2);
       ((ConcurrentMergeScheduler) writer1.getMergeScheduler()).setSuppressExceptions();

-      IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
       // Intentionally use different params so flush/merge
       // happen @ different times
       writer2.setMaxBufferedDocs(2);

@@ -178,7 +178,7 @@ public class TestTransactions extends LuceneTestCase
   }

   public void initIndex(Directory dir) throws Throwable {
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     for(int j=0; j<7; j++) {
       Document d = new Document();
       int n = RANDOM.nextInt();
@@ -128,7 +128,7 @@ public class TestQueryParser extends LocalizedTestCase {
     /** Filters LowerCaseTokenizer with StopFilter. */
     @Override
    public final TokenStream tokenStream(String fieldName, Reader reader) {
-      return new QPTestFilter(new LowerCaseTokenizer(reader));
+      return new QPTestFilter(new LowerCaseTokenizer(Version.LUCENE_CURRENT, reader));
     }
   }

@@ -158,7 +158,7 @@ public class TestQueryParser extends LocalizedTestCase {

   public QueryParser getParser(Analyzer a) throws Exception {
     if (a == null)
-      a = new SimpleAnalyzer();
+      a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
     QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.OR_OPERATOR);
     return qp;

@@ -228,7 +228,7 @@ public class TestQueryParser extends LocalizedTestCase {
   public Query getQueryDOA(String query, Analyzer a)
     throws Exception {
     if (a == null)
-      a = new SimpleAnalyzer();
+      a = new SimpleAnalyzer(Version.LUCENE_CURRENT);
     QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", a);
     qp.setDefaultOperator(QueryParser.AND_OPERATOR);
     return qp.parse(query);

@@ -253,8 +253,8 @@ public class TestQueryParser extends LocalizedTestCase {

   public void testSimple() throws Exception {
     assertQueryEquals("term term term", null, "term term term");
-    assertQueryEquals("türm term term", new WhitespaceAnalyzer(), "türm term term");
-    assertQueryEquals("ümlaut", new WhitespaceAnalyzer(), "ümlaut");
+    assertQueryEquals("türm term term", new WhitespaceAnalyzer(Version.LUCENE_CURRENT), "türm term term");
+    assertQueryEquals("ümlaut", new WhitespaceAnalyzer(Version.LUCENE_CURRENT), "ümlaut");

     assertQueryEquals("\"\"", new KeywordAnalyzer(), "");
     assertQueryEquals("foo:\"\"", new KeywordAnalyzer(), "foo:");

@@ -311,7 +311,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }

   public void testPunct() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer();
+    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
     assertQueryEquals("a&b", a, "a&b");
     assertQueryEquals("a&&b", a, "a&&b");
     assertQueryEquals(".NET", a, ".NET");

@@ -456,7 +456,7 @@ public class TestQueryParser extends LocalizedTestCase {
     assertQueryEquals("[ a TO z]", null, "[a TO z]");
     assertEquals(MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT, ((TermRangeQuery)getQuery("[ a TO z]", null)).getRewriteMethod());

-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer(Version.LUCENE_CURRENT));
     qp.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     assertEquals(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE,((TermRangeQuery)qp.parse("[ a TO z]")).getRewriteMethod());
@@ -473,7 +473,7 @@ public class TestQueryParser extends LocalizedTestCase {
   public void testFarsiRangeCollating() throws Exception {

     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true,
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content","\u0633\u0627\u0628",

@@ -482,7 +482,7 @@ public class TestQueryParser extends LocalizedTestCase {
     iw.close();
     IndexSearcher is = new IndexSearcher(ramDir, true);

-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "content", new WhitespaceAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "content", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));

     // Neither Java 1.4.2 nor 1.5.0 has Farsi Locale collation available in
     // RuleBasedCollator. However, the Arabic Locale seems to order the Farsi

@@ -580,7 +580,7 @@ public class TestQueryParser extends LocalizedTestCase {
     final String defaultField = "default";
     final String monthField = "month";
     final String hourField = "hour";
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer(Version.LUCENE_CURRENT));

     // Don't set any date resolution and verify if DateField is used
     assertDateRangeQueryEquals(qp, defaultField, startDate, endDate,

@@ -621,7 +621,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }

   public void testEscaped() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer();
+    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);

     /*assertQueryEquals("\\[brackets", a, "\\[brackets");
     assertQueryEquals("\\[brackets", null, "brackets");

@@ -715,7 +715,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }

   public void testQueryStringEscaping() throws Exception {
-    Analyzer a = new WhitespaceAnalyzer();
+    Analyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);

     assertEscapedQueryEquals("a-b:c", a, "a\\-b\\:c");
     assertEscapedQueryEquals("a+b:c", a, "a\\+b\\:c");

@@ -844,7 +844,7 @@ public class TestQueryParser extends LocalizedTestCase {

   public void testCustomQueryParserWildcard() {
     try {
-      new QPTestParser("contents", new WhitespaceAnalyzer()).parse("a?t");
+      new QPTestParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).parse("a?t");
       fail("Wildcard queries should not be allowed");
     } catch (ParseException expected) {
       // expected exception

@@ -853,7 +853,7 @@ public class TestQueryParser extends LocalizedTestCase {

   public void testCustomQueryParserFuzzy() throws Exception {
     try {
-      new QPTestParser("contents", new WhitespaceAnalyzer()).parse("xunit~");
+      new QPTestParser("contents", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).parse("xunit~");
       fail("Fuzzy queries should not be allowed");
     } catch (ParseException expected) {
       // expected exception
@@ -863,7 +863,7 @@ public class TestQueryParser extends LocalizedTestCase {
   public void testBooleanQuery() throws Exception {
     BooleanQuery.setMaxClauseCount(2);
     try {
-      QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
+      QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
       qp.parse("one two three");
       fail("ParseException expected due to too many boolean clauses");
     } catch (ParseException expected) {

@@ -875,7 +875,7 @@ public class TestQueryParser extends LocalizedTestCase {
    * This test differs from TestPrecedenceQueryParser
    */
   public void testPrecedence() throws Exception {
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     Query query1 = qp.parse("A AND B OR C AND D");
     Query query2 = qp.parse("+A +B +C +D");
     assertEquals(query1, query2);

@@ -883,7 +883,7 @@ public class TestQueryParser extends LocalizedTestCase {

   public void testLocalDateFormat() throws IOException, ParseException {
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
     addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
     iw.close();

@@ -899,7 +899,7 @@ public class TestQueryParser extends LocalizedTestCase {

   public void testStarParsing() throws Exception {
     final int[] type = new int[1];
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer()) {
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT)) {
       @Override
       protected Query getWildcardQuery(String field, String termStr) throws ParseException {
         // override error checking of superclass

@@ -991,7 +991,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }

   public void testMatchAllDocs() throws Exception {
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     assertEquals(new MatchAllDocsQuery(), qp.parse("*:*"));
     assertEquals(new MatchAllDocsQuery(), qp.parse("(*:*)"));
     BooleanQuery bq = (BooleanQuery)qp.parse("+*:* -*:*");

@@ -1000,7 +1000,7 @@ public class TestQueryParser extends LocalizedTestCase {
   }

   private void assertHits(int expected, String query, IndexSearcher is) throws ParseException, IOException {
-    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "date", new WhitespaceAnalyzer());
+    QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "date", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     qp.setLocale(Locale.ENGLISH);
     Query q = qp.parse(query);
     ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
@@ -20,6 +20,7 @@ package org.apache.lucene.search;
 import java.util.Random;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;

@@ -97,7 +98,7 @@ public class BaseTestRangeFilter extends LuceneTestCase {
     try {

       /* build an index */
-      IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(), T,
+      IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
           IndexWriter.MaxFieldLength.LIMITED);

       for (int d = minId; d <= maxId; d++) {
@@ -15,6 +15,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;

 /**
  * Copyright 2005 Apache Software Foundation

@@ -199,7 +200,7 @@ public class QueryUtils {
   private static RAMDirectory makeEmptyIndex(final int numDeletedDocs)
       throws IOException {
     RAMDirectory d = new RAMDirectory();
-    IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(), true,
+    IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         MaxFieldLength.LIMITED);
     for (int i = 0; i < numDeletedDocs; i++) {
       w.addDocument(new Document());
@@ -51,7 +51,7 @@ public class TestBoolean2 extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < docFields.length; i++) {
       Document doc = new Document();
       doc.add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));

@@ -68,14 +68,14 @@ public class TestBoolean2 extends LuceneTestCase {
     int docCount = 0;
     do {
       final Directory copy = new RAMDirectory(dir2);
-      IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+      IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
       w.addIndexesNoOptimize(new Directory[] {copy});
       docCount = w.maxDoc();
       w.close();
       mulFactor *= 2;
     } while(docCount < 3000);

-    IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     doc.add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
     for(int i=0;i<NUM_EXTRA_DOCS/2;i++) {

@@ -107,7 +107,7 @@ public class TestBoolean2 extends LuceneTestCase {
   };

   public Query makeQuery(String queryText) throws ParseException {
-    Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText);
+    Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer(Version.LUCENE_CURRENT))).parse(queryText);
     return q;
   }
@@ -20,6 +20,7 @@ package org.apache.lucene.search;

 import junit.framework.TestCase;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;

@@ -59,7 +60,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {

     index = new RAMDirectory();
     IndexWriter writer = new IndexWriter(index,
-        new WhitespaceAnalyzer(),
+        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         true, IndexWriter.MaxFieldLength.LIMITED);

     for (int i = 0; i < data.length; i++) {
@@ -18,6 +18,8 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
+
 import junit.framework.Test;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;

@@ -79,8 +81,7 @@ public class TestBooleanPrefixQuery extends LuceneTestCase {
     Query rw2 = null;
     IndexReader reader = null;
     try {
-      IndexWriter writer = new IndexWriter(directory, new
-        WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       for (int i = 0; i < categories.length; i++) {
         Document doc = new Document();
         doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.index.Term;

 public class TestBooleanQuery extends LuceneTestCase {

@@ -60,7 +61,7 @@ public class TestBooleanQuery extends LuceneTestCase {
   // LUCENE-1630
   public void testNullOrSubScorer() throws Throwable {
     Directory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "a b c d", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);
@@ -28,6 +28,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 public class TestBooleanScorer extends LuceneTestCase
 {

@@ -44,7 +45,7 @@ public class TestBooleanScorer extends LuceneTestCase
     String[] values = new String[] { "1", "2", "3", "4" };

     try {
-      IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       for (int i = 0; i < values.length; i++) {
         Document doc = new Document();
         doc.add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.DateTools;
 import org.apache.lucene.document.Document;

@@ -50,7 +51,7 @@ public class TestDateFilter
   {
     // create an index
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     long now = System.currentTimeMillis();

@@ -111,7 +112,7 @@ public class TestDateFilter
   {
     // create an index
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     long now = System.currentTimeMillis();
@@ -51,7 +51,7 @@ public class TestDateSort extends LuceneTestCase {
     super.setUp();
     // Create an index writer.
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     // oldest doc:

@@ -76,7 +76,7 @@ public class TestDateSort extends LuceneTestCase {

     Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.STRING, true));

-    QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer());
+    QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     Query query = queryParser.parse("Document");

     // Execute the search and process the search results.
@@ -19,6 +19,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;

@@ -79,7 +80,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase{

     index = new RAMDirectory();
     IndexWriter writer = new IndexWriter(index,
-        new WhitespaceAnalyzer(),
+        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setSimilarity(sim);
@@ -20,6 +20,7 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexReader;

@@ -39,7 +40,7 @@ public class TestDocBoost extends LuceneTestCase {

   public void testDocBoost() throws Exception {
     RAMDirectory store = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
     Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
@@ -35,6 +35,7 @@ import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.util._TestUtil;

 public class TestDocIdSet extends LuceneTestCase {

@@ -105,7 +106,7 @@ public class TestDocIdSet extends LuceneTestCase {
     // Tests that if a Filter produces a null DocIdSet, which is given to
     // IndexSearcher, everything works fine. This came up in LUCENE-1754.
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     doc.add(new Field("c", "val", Store.NO, Index.NOT_ANALYZED_NO_NORMS));
     writer.addDocument(doc);
@@ -23,6 +23,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.*;
 import org.apache.lucene.store.*;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import java.io.IOException;
 import java.util.HashMap;

@@ -35,7 +36,7 @@ public class TestElevationComparator extends LuceneTestCase {
   //@Test
   public void testSorting() throws Throwable {
     Directory directory = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     writer.setMergeFactor(1000);
     writer.addDocument(adoc(new String[] {"id", "a", "title", "ipod", "str_s", "a"}));
@@ -52,7 +52,7 @@ public class TestExplanations extends LuceneTestCase {
   public static final String KEY = "KEY";
   public static final String FIELD = "field";
   public static final QueryParser qp =
-    new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer());
+    new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer(Version.LUCENE_CURRENT));

   @Override
   public void tearDown() throws Exception {

@@ -64,7 +64,7 @@ public class TestExplanations extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
        IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < docFields.length; i++) {
       Document doc = new Document();
@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import java.io.IOException;
 import java.io.ByteArrayOutputStream;

@@ -40,7 +41,7 @@ public class TestFieldCache extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     long theLong = Long.MAX_VALUE;
     double theDouble = Double.MAX_VALUE;
     byte theByte = Byte.MAX_VALUE;
@@ -27,6 +27,7 @@ import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;

 /**
  * A basic 'positive' Unit test class for the FieldCacheRangeFilter class.

@@ -531,7 +532,7 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
   // test using a sparse index (with deleted docs). The DocIdSet should be not cacheable, as it uses TermDocs if the range contains 0
   public void testSparseIndex() throws IOException {
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), T, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), T, IndexWriter.MaxFieldLength.LIMITED);

     for (int d = -20; d <= 20; d++) {
       Document doc = new Document();
@@ -27,6 +27,7 @@ import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.DocIdBitSet;
+import org.apache.lucene.util.Version;

 import java.util.BitSet;

@@ -49,7 +50,7 @@ public class TestFilteredQuery extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     Document doc = new Document();
     doc.add (new Field("field", "one two three four five", Field.Store.YES, Field.Index.ANALYZED));
@@ -20,6 +20,7 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;

@@ -50,13 +51,13 @@ public class TestFilteredSearch extends LuceneTestCase {
     RAMDirectory directory = new RAMDirectory();
     int[] filterBits = {1, 36};
     SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits);
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     searchFiltered(writer, directory, filter, enforceSingleSegment);
     // run the test on more than one segment
     enforceSingleSegment = false;
     // reset - it is stateful
     filter.reset();
-    writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     // we index 60 docs - this will create 6 segments
     writer.setMaxBufferedDocs(10);
     searchFiltered(writer, directory, filter, enforceSingleSegment);
@@ -43,7 +43,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

   public void testFuzziness() throws Exception {
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     addDoc("aaaaa", writer);
     addDoc("aaaab", writer);
     addDoc("aaabb", writer);

@@ -200,7 +200,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

   public void testFuzzinessLong() throws Exception {
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     addDoc("aaaaaaa", writer);
     addDoc("segment", writer);
     writer.optimize();

@@ -288,7 +288,7 @@ public class TestFuzzyQuery extends LuceneTestCase {

   public void testTokenLengthOpt() throws IOException {
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         true, IndexWriter.MaxFieldLength.LIMITED);
     addDoc("12345678911", writer);
     addDoc("segment", writer);
@@ -28,6 +28,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import java.io.IOException;
 import java.util.LinkedList;

@@ -46,7 +47,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase

   public void testPhrasePrefix() throws IOException {
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     add("blueberry pie", writer);
     add("blueberry strudel", writer);
     add("blueberry pizza", writer);

@@ -140,7 +141,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
     // The contained PhraseMultiQuery must contain exactly one term array.

     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     add("blueberry pie", writer);
     add("blueberry chewing gum", writer);
     add("blue raspberry pie", writer);
@@ -26,6 +26,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;

 import java.io.IOException;
 import java.text.Collator;

@@ -65,7 +66,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
         "X 4 5 6" };

     small = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(small, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     for (int i = 0; i < data.length; i++) {

@@ -616,7 +617,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {

     /* build an index */
     RAMDirectory farsiIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(), T,
+    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
         IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,

@@ -656,7 +657,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {

     /* build an index */
     RAMDirectory danishIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(danishIndex, new SimpleAnalyzer(), T,
+    IndexWriter writer = new IndexWriter(danishIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
         IndexWriter.MaxFieldLength.LIMITED);

     // Danish collation orders the words below in the given order
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexReader;

@@ -41,7 +42,7 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     IndexWriter writer
-      = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+      = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     //writer.setUseCompoundFile(false);
     //writer.infoStream = System.out;
     for (int i = 0; i < numDocs; i++) {
@@ -30,6 +30,7 @@ import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {

@@ -43,7 +44,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
     final Random rnd = newRandom();

     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, MaxFieldLength.UNLIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, MaxFieldLength.UNLIMITED);

     DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.US));
@@ -39,7 +39,7 @@ public class TestNot extends LuceneTestCase {

   public void testNot() throws Exception {
     RAMDirectory store = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     Document d1 = new Document();
     d1.add(new Field("field", "a b", Field.Store.YES, Field.Index.ANALYZED));

@@ -49,7 +49,7 @@ public class TestNot extends LuceneTestCase {
     writer.close();

     Searcher searcher = new IndexSearcher(store, true);
-    QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
+    QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer(Version.LUCENE_CURRENT));
     Query query = parser.parse("a NOT b");
     //System.out.println(query);
     ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
@@ -29,6 +29,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.Version;

 public class TestNumericRangeQuery32 extends LuceneTestCase {
   // distance of entries

@@ -46,7 +47,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
     BooleanQuery.setMaxClauseCount(3*255*2 + 255);

     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         true, MaxFieldLength.UNLIMITED);

     NumericField
@@ -28,6 +28,7 @@ import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.Version;

 public class TestNumericRangeQuery64 extends LuceneTestCase {
   // distance of entries

@@ -45,7 +46,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
     BooleanQuery.setMaxClauseCount(7*255*2 + 255);

     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(),
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
         true, MaxFieldLength.UNLIMITED);

     NumericField
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;

@@ -48,7 +49,7 @@ public class TestPhrasePrefixQuery
     throws IOException
   {
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc1 = new Document();
     Document doc2 = new Document();
     Document doc3 = new Document();
@@ -51,7 +51,7 @@ public class TestPhraseQuery extends LuceneTestCase {
     Analyzer analyzer = new Analyzer() {
       @Override
       public TokenStream tokenStream(String fieldName, Reader reader) {
-        return new WhitespaceTokenizer(reader);
+        return new WhitespaceTokenizer(Version.LUCENE_CURRENT, reader);
       }

       @Override

@@ -239,7 +239,7 @@ public class TestPhraseQuery extends LuceneTestCase {

   public void testPhraseQueryInConjunctionScorer() throws Exception {
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     Document doc = new Document();

@@ -275,7 +275,7 @@ public class TestPhraseQuery extends LuceneTestCase {

     searcher.close();

-    writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+    writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);
     doc = new Document();
     doc.add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.ANALYZED));

@@ -325,7 +325,7 @@ public class TestPhraseQuery extends LuceneTestCase {

   public void testSlopScoring() throws IOException {
     Directory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     Document doc = new Document();
@@ -225,7 +225,7 @@ public class TestPositionIncrement extends LuceneTestCase {

   private static class StopWhitespaceAnalyzer extends Analyzer {
     boolean enablePositionIncrements;
-    final WhitespaceAnalyzer a = new WhitespaceAnalyzer();
+    final WhitespaceAnalyzer a = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
     public StopWhitespaceAnalyzer(boolean enablePositionIncrements) {
       this.enablePositionIncrements = enablePositionIncrements;
     }

@@ -318,7 +318,7 @@ class TestPayloadAnalyzer extends Analyzer {

   @Override
   public TokenStream tokenStream(String fieldName, Reader reader) {
-    TokenStream result = new LowerCaseTokenizer(reader);
+    TokenStream result = new LowerCaseTokenizer(Version.LUCENE_CURRENT, reader);
     return new PayloadFilter(result, fieldName);
   }
 }
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;

@@ -37,7 +38,7 @@ public class TestPrefixFilter extends LuceneTestCase {
                                         "/Computers/Mac/One",
                                         "/Computers/Mac/Two",
                                         "/Computers/Windows"};
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < categories.length; i++) {
       Document doc = new Document();
       doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;

@@ -46,7 +47,7 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
     super.setUp();

     IndexWriter writer = new IndexWriter(directory,
-        new WhitespaceAnalyzer(), true,
+        new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
         IndexWriter.MaxFieldLength.LIMITED);

     for (int i = 0; i < 5137; ++i) {
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;

@@ -36,7 +37,7 @@ public class TestPrefixQuery extends LuceneTestCase {
     String[] categories = new String[] {"/Computers",
                                         "/Computers/Mac",
                                         "/Computers/Windows"};
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < categories.length; i++) {
       Document doc = new Document();
       doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;

 public class TestQueryTermVector extends LuceneTestCase {

@@ -42,7 +43,7 @@ public class TestQueryTermVector extends LuceneTestCase {
     result = new QueryTermVector(null);
     assertTrue(result.getTerms().length == 0);

-    result = new QueryTermVector("foo bar foo again foo bar go go go", new WhitespaceAnalyzer());
+    result = new QueryTermVector("foo bar foo again foo bar go go go", new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
     assertTrue(result != null);
     terms = result.getTerms();
     assertTrue(terms.length == 4);
@@ -2,6 +2,7 @@ package org.apache.lucene.search;

 import org.apache.lucene.util.DocIdBitSet;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;

 import java.util.Random;
 import java.util.BitSet;

@@ -45,7 +46,7 @@ public class TestScorerPerf extends LuceneTestCase {
     // Create a dummy index with nothing in it.
     // This could possibly fail if Lucene starts checking for docid ranges...
     RAMDirectory rd = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(rd,new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter iw = new IndexWriter(rd,new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     iw.addDocument(new Document());
     iw.close();
     s = new IndexSearcher(rd, true);

@@ -60,7 +61,7 @@ public class TestScorerPerf extends LuceneTestCase {
       terms[i] = new Term("f",Character.toString((char)('A'+i)));
     }

-    IndexWriter iw = new IndexWriter(dir,new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter iw = new IndexWriter(dir,new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i=0; i<nDocs; i++) {
       Document d = new Document();
       for (int j=0; j<nTerms; j++) {
@@ -20,6 +20,7 @@ package org.apache.lucene.search;
 import java.io.IOException;

 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexReader;

@@ -39,7 +40,7 @@ public class TestSetNorm extends LuceneTestCase {

   public void testSetNorm() throws Exception {
     RAMDirectory store = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

     // add the same document four times
     Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.ANALYZED);
@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import java.io.IOException;
 import java.util.Collection;
@@ -64,7 +65,7 @@ public class TestSimilarity extends LuceneTestCase {
 
   public void testSimilarity() throws Exception {
     RAMDirectory store = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     writer.setSimilarity(new SimpleSimilarity());
 

@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -117,7 +118,7 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
     query.setSlop(slop);
 
     RAMDirectory ramDir = new RAMDirectory();
-    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
+    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
     IndexWriter writer = new IndexWriter(ramDir, analyzer, MaxFieldLength.UNLIMITED);
     writer.addDocument(doc);
     writer.close();

@@ -41,6 +41,7 @@ import org.apache.lucene.store.LockObtainFailedException;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.DocIdBitSet;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 /**
  * Unit tests for sorting code.
@@ -103,7 +104,7 @@ public class TestSort extends LuceneTestCase implements Serializable {
   private Searcher getIndex (boolean even, boolean odd)
   throws IOException {
     RAMDirectory indexStore = new RAMDirectory ();
-    IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(2);
     writer.setMergeFactor(1000);
     for (int i=0; i<data.length; ++i) {
@@ -139,7 +140,7 @@ public class TestSort extends LuceneTestCase implements Serializable {
 
   private IndexSearcher getFullStrings() throws CorruptIndexException, LockObtainFailedException, IOException {
     RAMDirectory indexStore = new RAMDirectory ();
-    IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.setMaxBufferedDocs(4);
     writer.setMergeFactor(97);
     for (int i=0; i<NUM_STRINGS; i++) {

@@ -29,6 +29,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.English;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 public class TestSpanQueryFilter extends LuceneTestCase {
 
@@ -39,7 +40,7 @@ public class TestSpanQueryFilter extends LuceneTestCase {
 
   public void testFilterWorks() throws Exception {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < 500; i++) {
       Document document = new Document();

@@ -28,6 +28,7 @@ import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
 
 /**
  * A basic 'positive' Unit test class for the TermRangeFilter class.
@@ -339,7 +340,7 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
 
     /* build an index */
     RAMDirectory farsiIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(), T,
+    IndexWriter writer = new IndexWriter(farsiIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
                                          IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content","\u0633\u0627\u0628",
@@ -379,7 +380,7 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
     /* build an index */
     RAMDirectory danishIndex = new RAMDirectory();
     IndexWriter writer = new IndexWriter
-        (danishIndex, new SimpleAnalyzer(), T,
+        (danishIndex, new SimpleAnalyzer(Version.LUCENE_CURRENT), T,
          IndexWriter.MaxFieldLength.LIMITED);
     // Danish collation orders the words below in the given order
     // (example taken from TestSort.testInternationalSort() ).

@@ -28,6 +28,8 @@ import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
+
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 
 import java.io.IOException;
 import java.io.Reader;
 import java.util.Locale;
@@ -309,7 +311,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
   }
 
   private void initializeIndex(String[] values) throws IOException {
-    initializeIndex(values, new WhitespaceAnalyzer());
+    initializeIndex(values, new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
   }
 
   private void initializeIndex(String[] values, Analyzer analyzer) throws IOException {
@@ -321,7 +323,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
   }
 
   private void addDoc(String content) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
     insertDoc(writer, content);
     writer.close();
   }

@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -51,7 +52,7 @@ public class TestTermScorer extends LuceneTestCase
     directory = new RAMDirectory();
 
 
-    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     for (int i = 0; i < values.length; i++)
     {
       Document doc = new Document();

@@ -18,6 +18,7 @@ package org.apache.lucene.search;
  */
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -41,7 +42,7 @@ public class TestTermVectors extends LuceneTestCase {
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     //writer.setUseCompoundFile(true);
     //writer.infoStream = System.out;
@@ -93,7 +94,7 @@ public class TestTermVectors extends LuceneTestCase {
 
   public void testTermVectorsFieldOrder() throws IOException {
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("c", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     doc.add(new Field("a", "some content here", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -231,7 +232,7 @@ public class TestTermVectors extends LuceneTestCase {
     Directory dir = new MockRAMDirectory();
 
     try {
-      IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
+      IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                            IndexWriter.MaxFieldLength.LIMITED);
       assertTrue(writer != null);
       writer.addDocument(testDoc1);
@@ -347,7 +348,7 @@ public class TestTermVectors extends LuceneTestCase {
 
   // Test only a few docs having vectors
   public void testRareVectors() throws IOException {
-    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     for(int i=0;i<100;i++) {
       Document doc = new Document();
@@ -379,7 +380,7 @@ public class TestTermVectors extends LuceneTestCase {
   // In a single doc, for the same field, mix the term
   // vectors up
   public void testMixedVectrosVectors() throws IOException {
-    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true,
+    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(Version.LUCENE_CURRENT), true,
                                          IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "one",
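Every hunk above applies the same one-line migration: the deprecated no-arg SimpleAnalyzer/WhitespaceAnalyzer constructors are replaced with the new Version-taking ones. Below is a minimal, self-contained sketch of that pattern, reusing only classes that appear in the hunks; the class name VersionCtorMigration, the field name, and the field value are illustrative assumptions, not part of the commit.

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical example class, not from the commit.
public class VersionCtorMigration {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    // Before: new WhitespaceAnalyzer() -- the deprecated no-arg ctor
    // being replaced throughout the hunks above.
    // After: pass an explicit Version to select the analyzer's behavior.
    IndexWriter writer = new IndexWriter(dir,
        new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
        true, IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("category", "/Computers", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
    writer.close();
  }
}

The tests pass Version.LUCENE_CURRENT so they always exercise the newest behavior; application code would normally pin a concrete constant such as Version.LUCENE_30 to keep analysis stable across upgrades.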