LUCENE-1084: fix the remaining deprecated calls to IndexWriter ctors

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@618764 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2008-02-05 20:07:29 +00:00
parent 18adf788d8
commit 7ea8bd35f4
75 changed files with 195 additions and 195 deletions
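
For context, below is a minimal before/after sketch of the pattern applied at every call site in this diff (a sketch only, against the Lucene 2.4-era API; the class name and toy document are illustrative and not part of the commit). The deprecated IndexWriter constructors left the per-field term limit implicit; the replacement constructors take an explicit IndexWriter.MaxFieldLength argument.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

// Illustrative only: not a class from this commit.
public class MaxFieldLengthMigration {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();

    // Before (deprecated): the per-field term limit is implicit.
    //   IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);

    // After: the limit is an explicit constructor argument.
    // LIMITED keeps the old default of 10,000 indexed terms per field;
    // UNLIMITED removes the cap entirely.
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true,
                                         IndexWriter.MaxFieldLength.LIMITED);

    Document doc = new Document();
    doc.add(new Field("body", "hello lucene", Field.Store.YES, Field.Index.TOKENIZED));
    writer.addDocument(doc);
    writer.close();
  }
}

Making the limit an explicit argument means callers opt in to truncation rather than being silently capped, which was the motivation behind LUCENE-1084.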


@@ -259,7 +259,7 @@ public class IndexTask extends Task {
 log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE);
 IndexWriter writer =
-new IndexWriter(indexDir, analyzer, create);
+new IndexWriter(indexDir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundIndex);
 int totalFiles = 0;


@@ -98,7 +98,7 @@ public class CreateIndexTask extends PerfTask {
 IndexWriter writer = new IndexWriter(runData.getDirectory(),
 runData.getConfig().get("autocommit", OpenIndexTask.DEFAULT_AUTO_COMMIT),
 runData.getAnalyzer(),
-true);
+true, IndexWriter.MaxFieldLength.LIMITED);
 CreateIndexTask.setIndexWriterConfig(writer, config);
 runData.setIndexWriter(writer);
 return 1;


@@ -50,7 +50,7 @@ public class OpenIndexTask extends PerfTask {
 IndexWriter writer = new IndexWriter(runData.getDirectory(),
 config.get("autocommit", DEFAULT_AUTO_COMMIT),
 runData.getAnalyzer(),
-false);
+false, IndexWriter.MaxFieldLength.LIMITED);
 CreateIndexTask.setIndexWriterConfig(writer, config);
 runData.setIndexWriter(writer);
 return 1;


@@ -90,7 +90,7 @@ public class TestPerfTasksLogic extends TestCase {
 assertEquals("TestSearchTask was supposed to be called!",279,CountingSearchTestTask.numSearches);
 assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
 // now we should be able to open the index for write.
-IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false);
+IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
 iw.close();
 IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
 assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -127,7 +127,7 @@ public class TestPerfTasksLogic extends TestCase {
 assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
 // now we should be able to open the index for write.
-IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false);
+IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
 iw.close();
 IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
 assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());


@@ -172,7 +172,7 @@ public class IndexModifier {
 this.directory = directory;
 synchronized(this.directory) {
 this.analyzer = analyzer;
-indexWriter = new IndexWriter(directory, analyzer, create);
+indexWriter = new IndexWriter(directory, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
 open = true;
 }
 }
@@ -201,7 +201,7 @@ public class IndexModifier {
 indexReader.close();
 indexReader = null;
 }
-indexWriter = new IndexWriter(directory, analyzer, false);
+indexWriter = new IndexWriter(directory, analyzer, false, IndexWriter.MaxFieldLength.LIMITED);
 // IndexModifier cannot use ConcurrentMergeScheduler
 // because it synchronizes on the directory which can
 // cause deadlock


@@ -66,7 +66,7 @@ public class TestBinaryDocument extends LuceneTestCase
 /** add the doc to a ram index */
 RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(doc);
 writer.close();


@@ -160,7 +160,7 @@ public class TestDocument extends LuceneTestCase
 public void testGetValuesForIndexedDocument() throws Exception
 {
 RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(makeDocumentWithFields());
 writer.close();
@@ -231,7 +231,7 @@ public class TestDocument extends LuceneTestCase
 doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.UN_TOKENIZED));
 RAMDirectory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(doc);
 field.setValue("id2");
 writer.addDocument(doc);


@@ -231,7 +231,7 @@ class DocHelper {
 */
 public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException
 {
-IndexWriter writer = new IndexWriter(dir, analyzer);
+IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
 writer.setSimilarity(similarity);
 //writer.setUseCompoundFile(false);
 writer.addDocument(doc);


@@ -326,7 +326,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 private IndexWriter newWriter(Directory dir, boolean create)
 throws IOException {
-final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create);
+final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMergePolicy(new LogDocMergePolicy());
 return writer;
 }


@@ -118,7 +118,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
 TimedThread[] threads = new TimedThread[4];
-IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 // Establish a base index of 100 docs:
 for(int i=0;i<100;i++) {


@@ -202,7 +202,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
 Directory dir = FSDirectory.getDirectory(dirName);
 // open writer
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 // add 10 docs
 for(int i=0;i<10;i++) {
@@ -240,7 +240,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
 searcher.close();
 // optimize
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
@@ -290,7 +290,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
 searcher.close();
 // optimize
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
@@ -312,7 +312,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
 dirName = fullDir(dirName);
 Directory dir = FSDirectory.getDirectory(dirName);
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(doCFS);
 writer.setMaxBufferedDocs(10);
@@ -347,7 +347,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
 boolean autoCommit = 0 == pass;
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setRAMBufferSizeMB(16.0);
 //IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
 for(int i=0;i<35;i++) {


@@ -63,7 +63,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 FailOnlyOnFlush failure = new FailOnlyOnFlush();
 directory.failOn(failure);
-IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 writer.setMergeScheduler(cms);
 writer.setMaxBufferedDocs(2);
@@ -100,7 +100,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 RAMDirectory directory = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(directory, ANALYZER, true);
+IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
 writer.setMergeScheduler(cms);
@@ -145,7 +145,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 for(int pass=0;pass<2;pass++) {
 boolean autoCommit = pass==0;
-IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 for(int iter=0;iter<7;iter++) {
 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
@@ -162,7 +162,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles autoCommit=" + autoCommit);
 // Reopen
-writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
+writer = new IndexWriter(directory, autoCommit, ANALYZER, false, IndexWriter.MaxFieldLength.LIMITED);
 }
 writer.close();
@@ -180,7 +180,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 for(int pass=0;pass<2;pass++) {
 boolean autoCommit = pass==0;
-IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
+IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 for(int iter=0;iter<10;iter++) {
 ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
@@ -212,7 +212,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 reader.close();
 // Reopen
-writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
+writer = new IndexWriter(directory, autoCommit, ANALYZER, false, IndexWriter.MaxFieldLength.LIMITED);
 }
 writer.close();
 }


@@ -187,7 +187,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 Directory dir = new RAMDirectory();
 ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 writer.close();
@@ -196,7 +196,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 // Record last time when writer performed deletes of
 // past commits
 lastDeleteTime = System.currentTimeMillis();
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 for(int j=0;j<17;j++) {
 addDoc(writer);
@@ -256,7 +256,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setUseCompoundFile(useCompoundFile);
 for(int i=0;i<107;i++) {
@@ -264,7 +264,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 }
 writer.close();
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 writer.optimize();
 writer.close();
@@ -294,7 +294,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 // Open & close a writer and assert that it
 // actually removed something:
 int preCount = dir.list().length;
-writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();
 int postCount = dir.list().length;
 assertTrue(postCount < preCount);
@@ -320,7 +320,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setUseCompoundFile(useCompoundFile);
 for(int i=0;i<107;i++) {
@@ -328,7 +328,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 }
 writer.close();
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 writer.optimize();
 writer.close();
@@ -368,7 +368,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 for(int j=0;j<N+1;j++) {
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setUseCompoundFile(useCompoundFile);
 for(int i=0;i<17;i++) {
@@ -428,14 +428,14 @@ public class TestDeletionPolicy extends LuceneTestCase
 KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 writer.close();
 Term searchTerm = new Term("content", "aaa");
 Query query = new TermQuery(searchTerm);
 for(int i=0;i<N+1;i++) {
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 for(int j=0;j<17;j++) {
 addDoc(writer);
@@ -452,7 +452,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 reader.close();
 searcher.close();
 }
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(useCompoundFile);
 writer.optimize();
 // this is a commit when autoCommit=false:
@@ -531,7 +531,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+IndexWriter writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setUseCompoundFile(useCompoundFile);
 writer.close();
@@ -540,7 +540,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 for(int i=0;i<N+1;i++) {
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), false, policy, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setUseCompoundFile(useCompoundFile);
 for(int j=0;j<17;j++) {
@@ -558,7 +558,7 @@ public class TestDeletionPolicy extends LuceneTestCase
 reader.close();
 searcher.close();
-writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy);
+writer = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), true, policy, IndexWriter.MaxFieldLength.LIMITED);
 // This will not commit: there are no changes
 // pending because we opened for "create":
 writer.close();


@@ -107,7 +107,7 @@ public class TestDoc extends LuceneTestCase {
 PrintWriter out = new PrintWriter(sw, true);
 Directory directory = FSDirectory.getDirectory(indexDir);
-IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 SegmentInfo si1 = indexDoc(writer, "test.txt");
 printSegment(out, si1);
@@ -135,7 +135,7 @@ public class TestDoc extends LuceneTestCase {
 out = new PrintWriter(sw, true);
 directory = FSDirectory.getDirectory(indexDir);
-writer = new IndexWriter(directory, new SimpleAnalyzer(), true);
+writer = new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 si1 = indexDoc(writer, "test.txt");
 printSegment(out, si1);


@@ -51,7 +51,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 Document testDoc = new Document();
 DocHelper.setupDoc(testDoc);
 Analyzer analyzer = new WhitespaceAnalyzer();
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(testDoc);
 writer.flush();
 SegmentInfo info = writer.newestSegment();
@@ -106,7 +106,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 }
 };
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.TOKENIZED));
@@ -166,7 +166,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 }
 };
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.TOKENIZED));
@@ -191,7 +191,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 public void testPreAnalyzedField() throws IOException {
-IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("preanalyzed", new TokenStream() {
@@ -246,7 +246,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 doc.add(new Field("f2", "v2", Store.YES, Index.UN_TOKENIZED, TermVector.NO));
 RAMDirectory ram = new RAMDirectory();
-IndexWriter writer = new IndexWriter(ram, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(ram, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.addDocument(doc);
 writer.close();


@@ -46,7 +46,7 @@ public class TestFieldsReader extends LuceneTestCase {
 fieldInfos = new FieldInfos();
 DocHelper.setupDoc(testDoc);
 fieldInfos.add(testDoc);
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 writer.addDocument(testDoc);
 writer.close();
@@ -204,7 +204,7 @@ public class TestFieldsReader extends LuceneTestCase {
 FSDirectory tmpDir = FSDirectory.getDirectory(file);
 assertTrue(tmpDir != null);
-IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 writer.addDocument(testDoc);
 writer.close();


@@ -50,7 +50,7 @@ public class TestIndexFileDeleter extends LuceneTestCase
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 int i;
 for(i=0;i<35;i++) {
@@ -155,7 +155,7 @@ public class TestIndexFileDeleter extends LuceneTestCase
 // Open & close a writer: it should delete the above 4
 // files and nothing more:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.close();
 String[] files2 = dir.list();


@@ -55,19 +55,19 @@ public class TestIndexReader extends LuceneTestCase
 public void testIsCurrent() throws Exception
 {
 RAMDirectory d = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 // set up reader:
 IndexReader reader = IndexReader.open(d);
 assertTrue(reader.isCurrent());
 // modify index by adding another document:
-writer = new IndexWriter(d, new StandardAnalyzer(), false);
+writer = new IndexWriter(d, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());
 // re-create index:
-writer = new IndexWriter(d, new StandardAnalyzer(), true);
+writer = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 assertFalse(reader.isCurrent());
@@ -83,7 +83,7 @@ public class TestIndexReader extends LuceneTestCase
 {
 RAMDirectory d = new MockRAMDirectory();
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 // set up reader
@@ -95,7 +95,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTrue(fieldNames.contains("unstored"));
 reader.close();
 // add more documents
-writer = new IndexWriter(d, new StandardAnalyzer(), false);
+writer = new IndexWriter(d, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 // want to get some more segments here
 for (int i = 0; i < 5*writer.getMergeFactor(); i++)
 {
@@ -175,7 +175,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testTermVectors() throws Exception {
 RAMDirectory d = new MockRAMDirectory();
 // set up writer
-IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 // want to get some more segments here
 // new termvector fields
 for (int i = 0; i < 5 * writer.getMergeFactor(); i++) {
@@ -243,7 +243,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 // add 100 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -291,7 +291,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 // add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 11; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -336,7 +336,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 // add 11 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 11; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -385,7 +385,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 // add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, searchTerm.text());
 writer.close();
@@ -430,7 +430,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm = new Term("content", "aaa");
 // add 1 documents with term : aaa
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 addDoc(writer, searchTerm.text());
 writer.close();
@@ -484,7 +484,7 @@ public class TestIndexReader extends LuceneTestCase
 Term searchTerm2 = new Term("content", "bbb");
 // add 100 documents with term : aaa
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm.text());
@@ -500,7 +500,7 @@ public class TestIndexReader extends LuceneTestCase
 assertTermDocsCount("first reader", reader, searchTerm2, 0);
 // add 100 documents with term : bbb
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm2.text());
@@ -567,7 +567,7 @@ public class TestIndexReader extends LuceneTestCase
 // Create initial data set
 File dirFile = new File(System.getProperty("tempDir"), "testIndex");
 Directory dir = getDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "test");
 writer.close();
 dir.close();
@@ -577,7 +577,7 @@ public class TestIndexReader extends LuceneTestCase
 dir = getDirectory();
 // Now create the data set again, just as before
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "test");
 writer.close();
 dir.close();
@@ -604,7 +604,7 @@ public class TestIndexReader extends LuceneTestCase
 else
 dir = getDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 assertTrue(IndexReader.isLocked(dir)); // writer open, so dir is locked
 writer.close();
@@ -628,7 +628,7 @@ public class TestIndexReader extends LuceneTestCase
 }
 }
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir);
@@ -646,7 +646,7 @@ public class TestIndexReader extends LuceneTestCase
 assertFalse(IndexReader.indexExists("there_is_no_such_index"));
 Directory dir = new MockRAMDirectory();
 assertFalse(IndexReader.indexExists(dir));
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 assertTrue(IndexReader.isLocked(dir)); // writer open, so dir is locked
 writer.close();
@@ -657,7 +657,7 @@ public class TestIndexReader extends LuceneTestCase
 reader.close();
 // modify index and check version has been
 // incremented:
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
 reader = IndexReader.open(dir);
@@ -668,10 +668,10 @@ public class TestIndexReader extends LuceneTestCase
 public void testLock() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 IndexReader reader = IndexReader.open(dir);
 try {
 reader.deleteDocument(0);
@@ -688,7 +688,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testUndeleteAll() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -705,7 +705,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testUndeleteAllAfterClose() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -722,7 +722,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testUndeleteAllAfterCloseThenReopen() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDocumentWithFields(writer);
 addDocumentWithFields(writer);
 writer.close();
@@ -760,7 +760,7 @@ public class TestIndexReader extends LuceneTestCase
 // First build up a starting index:
 RAMDirectory startDir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for(int i=0;i<157;i++) {
 Document d = new Document();
 d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.UN_TOKENIZED));
@@ -944,7 +944,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testDocsOutOfOrderJIRA140() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for(int i=0;i<11;i++) {
 addDoc(writer, "aaa");
 }
@@ -962,7 +962,7 @@ public class TestIndexReader extends LuceneTestCase
 }
 reader.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 // We must add more docs to get a new segment written
 for(int i=0;i<11;i++) {
@@ -984,7 +984,7 @@ public class TestIndexReader extends LuceneTestCase
 public void testExceptionReleaseWriteLockJIRA768() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "aaa");
 writer.close();
@@ -1058,7 +1058,7 @@ public class TestIndexReader extends LuceneTestCase
 // add 100 documents with term : aaa
 // add 100 documents with term : bbb
 // add 100 documents with term : ccc
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 100; i++)
 {
 addDoc(writer, searchTerm1.text());


@@ -548,7 +548,7 @@ public class TestIndexReaderReopen extends TestCase {
 final Directory dir = new RAMDirectory();
 final int n = 150;
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer());
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < n; i++) {
 writer.addDocument(createDocument(i, 3));
 }
@@ -566,7 +566,7 @@ public class TestIndexReaderReopen extends TestCase {
 modifier.deleteDocument(i);
 modifier.close();
 } else {
-IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer());
+IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.addDocument(createDocument(n + i, 6));
 modifier.close();
 }
@@ -781,7 +781,7 @@ public class TestIndexReaderReopen extends TestCase {
 }
 private static void createIndex(Directory dir, boolean multiSegment) throws IOException {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer());
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 w.setMergePolicy(new LogDocMergePolicy());
@@ -824,7 +824,7 @@ public class TestIndexReaderReopen extends TestCase {
 private static void modifyIndex(int i, Directory dir) throws IOException {
 switch (i) {
 case 0: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer());
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 w.deleteDocuments(new Term("field2", "a11"));
 w.deleteDocuments(new Term("field2", "b30"));
 w.close();
@@ -839,13 +839,13 @@ public class TestIndexReaderReopen extends TestCase {
 break;
 }
 case 2: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer());
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 w.optimize();
 w.close();
 break;
 }
 case 3: {
-IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer());
+IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 w.addDocument(createDocument(101, 4));
 w.optimize();
 w.addDocument(createDocument(102, 4));


@@ -47,7 +47,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 Directory dir = new RAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setUseCompoundFile(true);
 modifier.setMaxBufferedDeleteTerms(1);
@@ -74,7 +74,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 int hitCount = getHitCount(dir, term);
 assertEquals(1, hitCount);
 if (!autoCommit) {
-modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setUseCompoundFile(true);
 }
 modifier.deleteDocuments(term);
@@ -98,7 +98,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 Directory dir = new RAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);
@@ -122,7 +122,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 reader.close();
 if (!autoCommit) {
-modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);
 }
@@ -150,7 +150,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 boolean autoCommit = (0==pass);
 Directory dir = new RAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(4);
 modifier.setMaxBufferedDeleteTerms(4);
@@ -193,7 +193,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 Directory dir = new RAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(100);
 modifier.setMaxBufferedDeleteTerms(100);
@@ -234,7 +234,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 boolean autoCommit = (0==pass);
 Directory dir = new RAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);
@@ -255,7 +255,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 if (!autoCommit) {
 modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer());
+new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);
 }
@@ -278,7 +278,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 }
 if (!autoCommit) {
 modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer());
+new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(2);
 modifier.setMaxBufferedDeleteTerms(2);
 }
@@ -340,7 +340,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 // First build up a starting index:
 RAMDirectory startDir = new RAMDirectory();
 IndexWriter writer = new IndexWriter(startDir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 157; i++) {
 Document d = new Document();
 d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@@ -362,7 +362,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 while (!done) {
 MockRAMDirectory dir = new MockRAMDirectory(startDir);
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer());
+new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(1000); // use flush or close
 modifier.setMaxBufferedDeleteTerms(1000); // use flush or close
@@ -597,7 +597,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 boolean autoCommit = (0==pass);
 MockRAMDirectory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setUseCompoundFile(true);
 modifier.setMaxBufferedDeleteTerms(2);
@@ -633,7 +633,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 // open the writer again (closed above)
 if (!autoCommit) {
-modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer());
+modifier = new IndexWriter(dir, autoCommit, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.setUseCompoundFile(true);
 }
@@ -726,7 +726,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
 boolean autoCommit = (0==pass);
 MockRAMDirectory dir = new MockRAMDirectory();
 IndexWriter modifier = new IndexWriter(dir, autoCommit,
-new WhitespaceAnalyzer(), true);
+new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 dir.failOn(failure.reset());


@@ -75,10 +75,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
 IndexWriter im;
 try {
-im = new IndexWriter(this.__test_dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(), false);
+im = new IndexWriter(this.__test_dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 } catch (FileNotFoundException e) {
 try {
-im = new IndexWriter(this.__test_dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(), false);
+im = new IndexWriter(this.__test_dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 } catch (FileNotFoundException e1) {
 }
 }


@@ -34,7 +34,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testNormalCase() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 writer.setMergePolicy(new LogDocMergePolicy());
@@ -51,7 +51,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testNoOverMerge() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 writer.setMergePolicy(new LogDocMergePolicy());
@@ -73,7 +73,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testForceFlush() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(10);
 LogDocMergePolicy mp = new LogDocMergePolicy();
@@ -84,7 +84,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 addDoc(writer);
 writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergePolicy(mp);
 mp.setMinMergeDocs(100);
@@ -99,7 +99,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMergeFactorChange() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(100);
 writer.setMergePolicy(new LogDocMergePolicy());
@@ -125,7 +125,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMaxBufferedDocsChange() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(101);
 writer.setMergeFactor(101);
 writer.setMergePolicy(new LogDocMergePolicy());
@@ -139,7 +139,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 }
 writer.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(101);
 writer.setMergeFactor(101);
 writer.setMergePolicy(new LogDocMergePolicy());
@@ -167,7 +167,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 public void testMergeDocCount0() throws IOException {
 Directory dir = new RAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMergePolicy(new LogDocMergePolicy());
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(100);
@@ -182,7 +182,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
 reader.deleteDocuments(new Term("content", "aaa"));
 reader.close();
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMergePolicy(new LogDocMergePolicy());
 writer.setMaxBufferedDocs(10);
 writer.setMergeFactor(5);


@@ -56,7 +56,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
 Directory merged = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMergeFactor(2);
 writer.addIndexes(new Directory[]{indexA, indexB});
@@ -92,7 +92,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
 private void fillIndex(Directory dir, int start, int numDocs) throws IOException
 {
-IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMergeFactor(2);
 writer.setMaxBufferedDocs(2);


@@ -67,7 +67,7 @@ public class TestLazyBug extends LuceneTestCase {
 try {
 Random r = new Random(BASE_SEED + 42) ;
 Analyzer analyzer = new SimpleAnalyzer();
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);


@@ -49,7 +49,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
 int numDocs = 500;
 Directory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(10);
 for (int i = 0; i < numDocs; i++) {
 Document doc = new Document();
@@ -109,7 +109,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
 public void testSeek() throws IOException {
 Directory directory = new RAMDirectory();
-IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 for (int i = 0; i < 10; i++) {
 Document doc = new Document();
 doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.TOKENIZED));


@@ -150,7 +150,7 @@ public class TestMultiSegmentReader extends LuceneTestCase {
 }
 private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
-IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create);
+IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
 Document doc = new Document();
 doc.add(new Field("body", s, Field.Store.YES, Field.Index.TOKENIZED));
 iw.addDocument(doc);


@@ -109,7 +109,7 @@ public class TestNorms extends LuceneTestCase {
 Directory dir3 = FSDirectory.getDirectory(indexDir3);
 createIndex(dir3);
-IndexWriter iw = new IndexWriter(dir3,anlzr,false);
+IndexWriter iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
 iw.setMaxBufferedDocs(5);
 iw.setMergeFactor(3);
 iw.addIndexes(new Directory[]{dir1,dir2});
@@ -126,7 +126,7 @@ public class TestNorms extends LuceneTestCase {
 doTestNorms(dir3);
 // now with optimize
-iw = new IndexWriter(dir3,anlzr,false);
+iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
 iw.setMaxBufferedDocs(5);
 iw.setMergeFactor(3);
 iw.optimize();
@@ -152,7 +152,7 @@ public class TestNorms extends LuceneTestCase {
 }
 private void createIndex(Directory dir) throws IOException {
-IndexWriter iw = new IndexWriter(dir,anlzr,true);
+IndexWriter iw = new IndexWriter(dir,anlzr,true, IndexWriter.MaxFieldLength.LIMITED);
 iw.setMaxBufferedDocs(5);
 iw.setMergeFactor(3);
 iw.setSimilarity(similarityOne);
@@ -194,7 +194,7 @@ public class TestNorms extends LuceneTestCase {
 }
 private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
-IndexWriter iw = new IndexWriter(dir,anlzr,false);
+IndexWriter iw = new IndexWriter(dir,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
 iw.setMaxBufferedDocs(5);
 iw.setMergeFactor(3);
 iw.setSimilarity(similarityOne);


@@ -106,7 +106,7 @@ public class TestParallelReader extends LuceneTestCase {
 // one document only:
 Directory dir2 = new MockRAMDirectory();
-IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true);
+IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 Document d3 = new Document();
 d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
 w2.addDocument(d3);
@@ -151,13 +151,13 @@ public class TestParallelReader extends LuceneTestCase {
 Directory dir2 = getDir1();
 // add another document to ensure that the indexes are not optimized
-IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer());
+IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 Document d = new Document();
 d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
 modifier.addDocument(d);
 modifier.close();
-modifier = new IndexWriter(dir2, new StandardAnalyzer());
+modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 d = new Document();
 d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.TOKENIZED));
 modifier.addDocument(d);
@@ -170,7 +170,7 @@ public class TestParallelReader extends LuceneTestCase {
 assertFalse(pr.isOptimized());
 pr.close();
-modifier = new IndexWriter(dir1, new StandardAnalyzer());
+modifier = new IndexWriter(dir1, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.optimize();
 modifier.close();
@@ -182,7 +182,7 @@ public class TestParallelReader extends LuceneTestCase {
 pr.close();
-modifier = new IndexWriter(dir2, new StandardAnalyzer());
+modifier = new IndexWriter(dir2, new StandardAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 modifier.optimize();
 modifier.close();
@@ -214,7 +214,7 @@ public class TestParallelReader extends LuceneTestCase {
 // Fields 1-4 indexed together:
 private Searcher single() throws IOException {
 Directory dir = new MockRAMDirectory();
-IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);
+IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 Document d1 = new Document();
 d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
 d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.TOKENIZED));
@@ -244,7 +244,7 @@ public class TestParallelReader extends LuceneTestCase {
 private Directory getDir1() throws IOException {
 Directory dir1 = new MockRAMDirectory();
-IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true);
+IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 Document d1 = new Document();
 d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
 d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.TOKENIZED));
@@ -259,7 +259,7 @@ public class TestParallelReader extends LuceneTestCase {
 private Directory getDir2() throws IOException {
 Directory dir2 = new RAMDirectory();
-IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true);
+IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 Document d3 = new Document();
 d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
 d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.TOKENIZED));


@@ -38,7 +38,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
 Document doc;
 RAMDirectory rd1 = new RAMDirectory();
-IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true);
+IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 doc = new Document();
 doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@@ -50,7 +50,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
 iw1.close();
 RAMDirectory rd2 = new RAMDirectory();
-IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true);
+IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 doc = new Document();
 doc.add(new Field("field0", "", Store.NO, Index.TOKENIZED));


@@ -95,7 +95,7 @@ public class TestPayloads extends LuceneTestCase {
 public void testPayloadFieldBit() throws Exception {
 Directory ram = new RAMDirectory();
 PayloadAnalyzer analyzer = new PayloadAnalyzer();
-IndexWriter writer = new IndexWriter(ram, analyzer, true);
+IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 Document d = new Document();
 // this field won't have any payloads
 d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.TOKENIZED));
@@ -123,7 +123,7 @@ public class TestPayloads extends LuceneTestCase {
 // now we add another document which has payloads for field f3 and verify if the SegmentMerger
 // enabled payloads for that field
-writer = new IndexWriter(ram, analyzer, true);
+writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 d = new Document();
 d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.TOKENIZED));
 d.add(new Field("f2", "This field has payloads in all docs", Field.Store.NO, Field.Index.TOKENIZED));
@@ -164,7 +164,7 @@ public class TestPayloads extends LuceneTestCase {
 // different tests to verify the payload encoding
 private void performTest(Directory dir) throws Exception {
 PayloadAnalyzer analyzer = new PayloadAnalyzer();
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 // should be in sync with value in TermInfosWriter
 final int skipInterval = 16;
@@ -301,7 +301,7 @@ public class TestPayloads extends LuceneTestCase {
 // test long payload
 analyzer = new PayloadAnalyzer();
-writer = new IndexWriter(dir, analyzer, true);
+writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 String singleTerm = "lucene";
 d = new Document();
@@ -474,7 +474,7 @@ public class TestPayloads extends LuceneTestCase {
 final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
 Directory dir = new RAMDirectory();
-final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer());
+final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
 final String field = "test";
 Thread[] ingesters = new Thread[numThreads];


@@ -43,9 +43,9 @@ public class TestSegmentTermEnum extends LuceneTestCase
 {
 IndexWriter writer = null;
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
-// add 100 documents with term : aaa
+// ADD 100 documents with term : aaa
 // add 100 documents with terms: aaa bbb
 // Therefore, term 'aaa' has document frequency of 200 and term 'bbb' 100
 for (int i = 0; i < 100; i++) {
@@ -59,7 +59,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
 verifyDocFreq();
 // merge segments by optimizing the index
-writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
+writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
 writer.optimize();
 writer.close();
@@ -70,7 +70,7 @@ public class TestSegmentTermEnum extends LuceneTestCase
 public void testPrevTermAtEnd() throws IOException
 {
 Directory dir = new MockRAMDirectory();
-IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 addDoc(writer, "aaa bbb");
 writer.close();
 IndexReader reader = IndexReader.open(dir);


@@ -118,7 +118,7 @@ public class TestStressIndexing extends LuceneTestCase {
 stress test.
 */
 public void runStressTest(Directory directory, boolean autoCommit, MergeScheduler mergeScheduler) throws Exception {
-IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true);
+IndexWriter modifier = new IndexWriter(directory, autoCommit, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
 modifier.setMaxBufferedDocs(10);


@@ -89,7 +89,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
 }
 Arrays.sort(tokens);
-IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(), true);
+IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setUseCompoundFile(false);
 Document doc = new Document();
 for(int i=0;i<testFields.length;i++) {


@@ -65,7 +65,7 @@ public class TestTermdocPerf extends LuceneTestCase {
 Document doc = new Document();
 doc.add(new Field(field,val, Field.Store.NO, Field.Index.NO_NORMS));
-IndexWriter writer = new IndexWriter(dir, analyzer, true);
+IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 writer.setMaxBufferedDocs(100);
 writer.setMergeFactor(100);

View File

@ -53,7 +53,7 @@ public class TestThreadedOptimize extends LuceneTestCase {
public void runTest(Directory directory, boolean autoCommit, MergeScheduler merger) throws Exception {
IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true);
IndexWriter writer = new IndexWriter(directory, autoCommit, ANALYZER, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
if (merger != null)
writer.setMergeScheduler(merger);
@ -120,7 +120,7 @@ public class TestThreadedOptimize extends LuceneTestCase {
if (!autoCommit) {
writer.close();
writer = new IndexWriter(directory, autoCommit, ANALYZER, false);
writer = new IndexWriter(directory, autoCommit, ANALYZER, false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
}

View File

@ -62,7 +62,7 @@ public class TestRAMDirectory extends LuceneTestCase {
throw new IOException("java.io.tmpdir undefined, cannot run test");
indexDir = new File(tempDir, "RAMDirIndex");
IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
// add some documents
Document doc = null;
for (int i = 0; i < docsToAdd; i++) {
@ -159,7 +159,7 @@ public class TestRAMDirectory extends LuceneTestCase {
public void testRAMDirectorySize() throws IOException, InterruptedException {
final MockRAMDirectory ramDir = new MockRAMDirectory(indexDir.getCanonicalPath());
final IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false);
final IndexWriter writer = new IndexWriter(ramDir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
assertEquals(ramDir.sizeInBytes(), ramDir.getRecomputedSizeInBytes());

View File

@ -282,7 +282,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
public void testStopWordSearching() throws Exception {
Analyzer analyzer = new StandardAnalyzer();
Directory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, analyzer, true);
IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.TOKENIZED));
iw.addDocument(doc);

View File

@ -764,7 +764,7 @@ public class TestQueryParser extends LuceneTestCase {
public void testLocalDateFormat() throws IOException, ParseException {
RAMDirectory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true);
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
iw.close();

View File

@ -46,7 +46,7 @@ public class TestBoolean2 extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < docFields.length; i++) {
Document doc = new Document();
doc.add(new Field(field, docFields[i], Field.Store.NO, Field.Index.TOKENIZED));

View File

@ -61,7 +61,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
index = new RAMDirectory();
IndexWriter writer = new IndexWriter(index,
new WhitespaceAnalyzer(),
true);
true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < data.length; i++) {
Document doc = new Document();

View File

@ -140,7 +140,7 @@ public class TestBooleanOr extends LuceneTestCase {
RAMDirectory rd = new RAMDirectory();
//
IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(), true);
IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
//
Document d = new Document();

View File

@ -65,7 +65,7 @@ public class TestBooleanPrefixQuery extends LuceneTestCase {
Query rw2 = null;
try {
IndexWriter writer = new IndexWriter(directory, new
WhitespaceAnalyzer(), true);
WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < categories.length; i++) {
Document doc = new Document();
doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));

View File

@ -47,7 +47,7 @@ public class TestBooleanScorer extends LuceneTestCase
String[] values = new String[] { "1", "2", "3", "4" };
try {
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < values.length; i++) {
Document doc = new Document();
doc.add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.UN_TOKENIZED));

View File

@ -27,7 +27,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
public class TestCachingWrapperFilter extends LuceneTestCase {
public void testCachingWorks() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.close();
IndexReader reader = IndexReader.open(dir);

View File

@ -71,7 +71,7 @@ implements Serializable {
private Directory getIndex()
throws IOException {
RAMDirectory indexStore = new RAMDirectory ();
IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(), true);
IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
RandomGen random = new RandomGen();
for (int i=0; i<INDEX_SIZE; ++i) { // don't decrease; if too low the problem doesn't show up
Document doc = new Document();

View File

@ -50,7 +50,7 @@ public class TestDateFilter
{
// create an index
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
long now = System.currentTimeMillis();
@ -111,7 +111,7 @@ public class TestDateFilter
{
// create an index
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
long now = System.currentTimeMillis();

View File

@ -78,7 +78,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase{
index = new RAMDirectory();
IndexWriter writer = new IndexWriter(index,
new WhitespaceAnalyzer(),
true);
true, IndexWriter.MaxFieldLength.LIMITED);
writer.setSimilarity(sim);
// hed is the most important field, dek is secondary

View File

@ -36,7 +36,7 @@ public class TestDocBoost extends LuceneTestCase {
public void testDocBoost() throws Exception {
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
Fieldable f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);

View File

@ -38,7 +38,7 @@ public class TestExtendedFieldCache extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
long theLong = Long.MAX_VALUE;
double theDouble = Double.MAX_VALUE;
for (int i = 0; i < NUM_DOCS; i++){

View File

@ -50,7 +50,7 @@ extends LuceneTestCase {
public void setUp()
throws Exception {
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add (new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));

View File

@ -36,7 +36,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
public void testFuzziness() throws Exception {
RAMDirectory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc("aaaaa", writer);
addDoc("aaaab", writer);
addDoc("aaabb", writer);
@ -164,7 +164,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
public void testFuzzinessLong() throws Exception {
RAMDirectory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc("aaaaaaa", writer);
addDoc("segment", writer);
writer.optimize();

View File

@ -36,7 +36,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
public void testQuery() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true);
IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc("one", iw);
addDoc("two", iw);
addDoc("three four", iw);

View File

@ -47,7 +47,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
public void testPhrasePrefix() throws IOException {
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
add("blueberry pie", writer);
add("blueberry strudel", writer);
add("blueberry pizza", writer);
@ -141,7 +141,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
// The contained PhraseMultiQuery must contain exactly one term array.
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
add("blueberry pie", writer);
add("blueberry chewing gum", writer);
add("blue raspberry pie", writer);
@ -169,7 +169,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase
public void testPhrasePrefixWithBooleanQuery() throws IOException {
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new String[]{}), true);
IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(new String[]{}), true, IndexWriter.MaxFieldLength.LIMITED);
add("This is a test", "object", writer);
add("a note", "note", writer);
writer.close();

View File

@ -84,9 +84,9 @@ public class TestMultiSearcher extends LuceneTestCase
lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating an index writer for the first index
IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);
IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
// creating an index writer for the second index, but writing nothing
IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true);
IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
//--------------------------------------------------------------------
// scenario 1
@ -130,7 +130,7 @@ public class TestMultiSearcher extends LuceneTestCase
//--------------------------------------------------------------------
// adding one document to the empty index
writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.addDocument(lDoc);
writerB.optimize();
writerB.close();
@ -176,7 +176,7 @@ public class TestMultiSearcher extends LuceneTestCase
readerB.close();
// optimizing the index with the writer
writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false);
writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.optimize();
writerB.close();
@ -217,7 +217,7 @@ public class TestMultiSearcher extends LuceneTestCase
IndexWriter indexWriter=null;
try {
indexWriter=new IndexWriter(directory, new KeywordAnalyzer(), create);
indexWriter=new IndexWriter(directory, new KeywordAnalyzer(), create, IndexWriter.MaxFieldLength.LIMITED);
for (int i=0; i<nDocs; i++) {
indexWriter.addDocument(createDocument("doc" + i, contents2));

View File

@ -44,7 +44,7 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
IndexWriter writer
= new IndexWriter(directory, new SimpleAnalyzer(), true);
= new IndexWriter(directory, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
//writer.setUseCompoundFile(false);
//writer.infoStream = System.out;
for (int i = 0; i < numDocs; i++) {

View File

@ -38,7 +38,7 @@ public class TestNot extends LuceneTestCase {
public void testNot() throws Exception {
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Document d1 = new Document();
d1.add(new Field("field", "a b", Field.Store.YES, Field.Index.TOKENIZED));

View File

@ -51,7 +51,7 @@ public class TestPhrasePrefixQuery
throws IOException
{
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
Document doc1 = new Document();
Document doc2 = new Document();
Document doc3 = new Document();

View File

@ -39,7 +39,7 @@ public class TestPrefixFilter extends LuceneTestCase {
"/Computers/Mac/One",
"/Computers/Mac/Two",
"/Computers/Windows"};
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < categories.length; i++) {
Document doc = new Document();
doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));

View File

@ -37,7 +37,7 @@ public class TestPrefixQuery extends LuceneTestCase {
String[] categories = new String[] {"/Computers",
"/Computers/Mac",
"/Computers/Windows"};
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < categories.length; i++) {
Document doc = new Document();
doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));

View File

@ -133,7 +133,7 @@ public class TestRangeQuery extends LuceneTestCase {
}
private void initializeIndex(String[] values) throws IOException {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < values.length; i++) {
insertDoc(writer, values[i]);
}
@ -141,7 +141,7 @@ public class TestRangeQuery extends LuceneTestCase {
}
private void addDoc(String content) throws IOException {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
insertDoc(writer, content);
writer.close();
}

View File

@ -54,7 +54,7 @@ public class TestRemoteSearchable extends LuceneTestCase {
private static void startServer() throws Exception {
// construct an index
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore,new SimpleAnalyzer(),true);
IndexWriter writer = new IndexWriter(indexStore,new SimpleAnalyzer(),true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.TOKENIZED));
doc.add(new Field("other", "other test text", Field.Store.YES, Field.Index.TOKENIZED));

View File

@ -50,7 +50,7 @@ public class TestScorerPerf extends LuceneTestCase {
// Create a dummy index with nothing in it.
// This could possibly fail if Lucene starts checking for docid ranges...
RAMDirectory rd = new RAMDirectory();
IndexWriter iw = new IndexWriter(rd,new WhitespaceAnalyzer(), true);
IndexWriter iw = new IndexWriter(rd,new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
iw.close();
s = new IndexSearcher(rd);
}
@ -64,7 +64,7 @@ public class TestScorerPerf extends LuceneTestCase {
terms[i] = new Term("f",Character.toString((char)('A'+i)));
}
IndexWriter iw = new IndexWriter(dir,new WhitespaceAnalyzer(), true);
IndexWriter iw = new IndexWriter(dir,new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i=0; i<nDocs; i++) {
Document d = new Document();
for (int j=0; j<nTerms; j++) {

View File

@ -49,7 +49,7 @@ public class TestSearchHitsWithDeletions extends TestCase {
public void setUp() throws Exception {
// Create an index writer.
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i=0; i<N; i++) {
writer.addDocument(createDocument(i));
}

View File

@ -37,7 +37,7 @@ public class TestSetNorm extends LuceneTestCase {
public void testSetNorm() throws Exception {
RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
// add the same document four times
Fieldable f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);

View File

@ -120,7 +120,7 @@ implements Serializable {
private Searcher getIndex (boolean even, boolean odd)
throws IOException {
RAMDirectory indexStore = new RAMDirectory ();
IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i=0; i<data.length; ++i) {
if (((i%2)==0 && even) || ((i%2)==1 && odd)) {
Document doc = new Document();

View File

@ -51,7 +51,7 @@ public class TestTermScorer extends LuceneTestCase
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < values.length; i++)
{
Document doc = new Document();

View File

@ -109,7 +109,7 @@ public class TestThreadSafe extends LuceneTestCase {
String[] words = "now is the time for all good men to come to the aid of their country".split(" ");
void buildDir(Directory dir, int nDocs, int maxFields, int maxFieldLen) throws IOException {
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(10);
for (int j=0; j<nDocs; j++) {
Document d = new Document();

View File

@ -139,7 +139,7 @@ public class TestWildcard
private RAMDirectory getIndexStore(String field, String[] contents)
throws IOException {
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true);
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < contents.length; ++i) {
Document doc = new Document();
doc.add(new Field(field, contents[i], Field.Store.YES, Field.Index.TOKENIZED));
@ -197,7 +197,7 @@ public class TestWildcard
// prepare the index
RAMDirectory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer());
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < docs.length; i++) {
Document doc = new Document();
doc.add(new Field(field,docs[i],Store.NO,Index.TOKENIZED));
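
LIMITED is not the only constant: IndexWriter.MaxFieldLength.UNLIMITED (Integer.MAX_VALUE) disables per-field term truncation entirely. The tests in this commit consistently use LIMITED because it preserves the 10,000-term default the deprecated constructors assumed. For contrast, a sketch of the unlimited variant, reusing the dir and analyzer from the hunk above:

    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(),
        IndexWriter.MaxFieldLength.UNLIMITED);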

View File

@ -86,7 +86,7 @@ public class TestBoostingTermQuery extends LuceneTestCase {
RAMDirectory directory = new RAMDirectory();
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer
= new IndexWriter(directory, analyzer, true);
= new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setSimilarity(similarity);
//writer.infoStream = System.out;
for (int i = 0; i < 1000; i++) {

View File

@ -51,7 +51,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < docFields.length; i++) {
Document doc = new Document();
doc.add(new Field(FIELD, docFields[i], Field.Store.NO, Field.Index.TOKENIZED));

View File

@ -38,7 +38,7 @@ public class TestSpans extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true);
IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < docFields.length; i++) {
Document doc = new Document();
doc.add(new Field(field, docFields[i], Field.Store.YES, Field.Index.TOKENIZED));

View File

@ -56,7 +56,7 @@ public class TestSpansAdvanced extends LuceneTestCase {
// create test index
mDirectory = new RAMDirectory();
final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), true);
final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
addDocument(writer, "1", "I think it should work.");
addDocument(writer, "2", "I think it should work.");
addDocument(writer, "3", "I think it should work.");

View File

@ -40,7 +40,7 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {
super.setUp();
// create test index
final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), false);
final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
addDocument(writer, "A", "Should we, could we, would we?");
addDocument(writer, "B", "It should. Should it?");
addDocument(writer, "C", "It shouldn't.");

View File

@ -159,7 +159,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
File indexDir = new File(System.getProperty("tempDir"), "testSetBufferSize");
MockFSDirectory dir = new MockFSDirectory(indexDir);
try {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
for(int i=0;i<37;i++) {
Document doc = new Document();

View File

@ -72,7 +72,7 @@ public class TestWindowsMMap extends LuceneTestCase {
// interior filters.
StandardAnalyzer analyzer = new StandardAnalyzer(new HashSet());
// TODO: something about lock timeouts and leftover locks.
IndexWriter writer = new IndexWriter(storeDirectory, analyzer, true);
IndexWriter writer = new IndexWriter(storeDirectory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexSearcher searcher = new IndexSearcher(storePathname);
for(int dx = 0; dx < 1000; dx ++) {