LUCENE-2294: cutover to IndexWriterConfig object for settings to IW

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@921480 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-03-10 17:53:46 +00:00
parent 8c0dcbb8c3
commit 7641c23cef
188 changed files with 1796 additions and 1644 deletions

View File

@@ -76,6 +76,15 @@ API Changes
use by external code. In addition it offers a matchExtension method which
callers can use to query whether a certain file matches a certain extension.
(Shai Erera via Mike McCandless)
* LUCENE-2294: IndexWriter constructors have been deprecated in favor of a
single ctor which accepts a Directory and an IndexWriterConfig. All
IndexWriter-related parameters are now set on IndexWriterConfig. The
corresponding setter/getter methods on IndexWriter were deprecated as well;
call writer.getConfig().getXYZ() to query a parameter XYZ. Likewise, the
MergePolicy-related setters/getters were deprecated; interact with the
MergePolicy directly.
(Shai Erera via Mike McCandless)
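For example, a typical cutover looks like this (a minimal sketch; dir and
analyzer stand in for an existing Directory and Analyzer, and the usual
org.apache.lucene.index imports are assumed):

    // Old (now deprecated):
    //   IndexWriter writer = new IndexWriter(dir, analyzer, true,
    //       IndexWriter.MaxFieldLength.UNLIMITED);
    //   writer.setRAMBufferSizeMB(48.0);

    // New: pass all settings up front through IndexWriterConfig.
    IndexWriter writer = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_31)
            .setAnalyzer(analyzer)
            .setOpenMode(OpenMode.CREATE)
            .setRAMBufferSizeMB(48.0));

    // Query a setting via getConfig() instead of the deprecated getters.
    double ramMB = writer.getConfig().getRAMBufferSizeMB();

    // MergePolicy settings are now changed on the MergePolicy itself.
    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
    lmp.setUseCompoundFile(false);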
* LUCENE-124: Add a TopTermsBoostOnlyBooleanQueryRewrite to MultiTermQuery.
This rewrite method is similar to TopTermsScoringBooleanQueryRewrite, but

View File

@@ -31,6 +31,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
@@ -51,7 +52,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
super.setUp();
dir = new RAMDirectory();
appAnalyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, appAnalyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(appAnalyzer));
int numDocs = 200;
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();

View File

@@ -31,6 +31,7 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause;
@@ -59,7 +60,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
*/
public IndexSearcher setUpSearcher(Analyzer analyzer) throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc;
doc = new Document();

View File

@@ -38,7 +38,10 @@ import org.apache.lucene.document.DateTools;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Searcher;
@@ -280,15 +283,17 @@ public class IndexTask extends Task {
log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE);
IndexWriter writer =
new IndexWriter(dir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(useCompoundIndex);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
Version.LUCENE_CURRENT).setAnalyzer(analyzer).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundIndex);
lmp.setUseCompoundDocStore(useCompoundIndex);
lmp.setMergeFactor(mergeFactor);
int totalFiles = 0;
int totalIndexed = 0;
int totalIgnored = 0;
try {
writer.setMergeFactor(mergeFactor);
for (int i = 0; i < rcs.size(); i++) {
ResourceCollection rc = rcs.elementAt(i);

View File

@@ -21,9 +21,12 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.Version;
import java.io.BufferedOutputStream;
import java.io.File;
@@ -99,7 +102,7 @@ public class CreateIndexTask extends PerfTask {
final double ramBuffer = config.get("ram.flush.mb",OpenIndexTask.DEFAULT_RAM_FLUSH_MB);
final int maxBuffered = config.get("max.buffered",OpenIndexTask.DEFAULT_MAX_BUFFERED);
if (maxBuffered == IndexWriter.DISABLE_AUTO_FLUSH) {
if (maxBuffered == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
writer.setRAMBufferSizeMB(ramBuffer);
writer.setMaxBufferedDocs(maxBuffered);
} else {
@@ -147,10 +150,9 @@
Config config = runData.getConfig();
IndexWriter writer = new IndexWriter(runData.getDirectory(),
runData.getAnalyzer(),
true,
getIndexDeletionPolicy(config),
IndexWriter.MaxFieldLength.LIMITED);
new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(
runData.getAnalyzer()).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(getIndexDeletionPolicy(config)));
setIndexWriterConfig(writer, config);
runData.setIndexWriter(writer);
return 1;

View File

@@ -21,7 +21,9 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.util.Version;
import java.io.IOException;
@@ -39,10 +41,10 @@ import java.io.IOException;
*/
public class OpenIndexTask extends PerfTask {
public static final int DEFAULT_MAX_BUFFERED = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;
public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
public static final int DEFAULT_MAX_BUFFERED = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
public static final int DEFAULT_MERGE_PFACTOR = LogMergePolicy.DEFAULT_MERGE_FACTOR;
public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB;
public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
private String commitUserData;
public OpenIndexTask(PerfRunData runData) {
@@ -61,10 +63,9 @@ public class OpenIndexTask extends PerfTask {
}
IndexWriter writer = new IndexWriter(runData.getDirectory(),
runData.getAnalyzer(),
CreateIndexTask.getIndexDeletionPolicy(config),
IndexWriter.MaxFieldLength.UNLIMITED,
ic);
new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(
runData.getAnalyzer()).setIndexDeletionPolicy(
CreateIndexTask.getIndexDeletionPolicy(config)).setIndexCommit(ic));
CreateIndexTask.setIndexWriterConfig(writer, config);
runData.setIndexWriter(writer);
return 1;

View File

@@ -36,12 +36,15 @@ import org.apache.lucene.benchmark.byTask.stats.TaskStats;
import org.apache.lucene.collation.CollationKeyAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.search.FieldCache.StringIndex;
import org.apache.lucene.search.FieldCache;
@@ -96,7 +99,9 @@ public class TestPerfTasksLogic extends LuceneTestCase {
assertEquals("TestSearchTask was supposed to be called!",279,CountingSearchTestTask.numSearches);
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -182,7 +187,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
@@ -221,7 +226,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -294,7 +299,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
assertEquals("TestSearchTask was supposed to be called!",139,CountingSearchTestTask.numSearches);
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
@@ -417,7 +422,9 @@ public class TestPerfTasksLogic extends LuceneTestCase {
benchmark = execBenchmark(algLines2);
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
@@ -655,7 +662,9 @@ public class TestPerfTasksLogic extends LuceneTestCase {
// 2. execute the algorithm (required in every "logic" test)
Benchmark benchmark = execBenchmark(algLines);
assertTrue("did not use the specified MergeScheduler", ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getMergeScheduler()).called);
assertTrue("did not use the specified MergeScheduler",
((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getConfig()
.getMergeScheduler()).called);
benchmark.getRunData().getIndexWriter().close();
// 3. test number of docs in the index
@@ -743,10 +752,10 @@ public class TestPerfTasksLogic extends LuceneTestCase {
// 2. execute the algorithm (required in every "logic" test)
Benchmark benchmark = execBenchmark(algLines);
final IndexWriter writer = benchmark.getRunData().getIndexWriter();
assertEquals(2, writer.getMaxBufferedDocs());
assertEquals(IndexWriter.DISABLE_AUTO_FLUSH, (int) writer.getRAMBufferSizeMB());
assertEquals(3, writer.getMergeFactor());
assertFalse(writer.getUseCompoundFile());
assertEquals(2, writer.getConfig().getMaxBufferedDocs());
assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB());
assertEquals(3, ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor());
assertFalse(((LogMergePolicy) writer.getMergePolicy()).getUseCompoundFile());
writer.close();
Directory dir = benchmark.getRunData().getDirectory();
IndexReader reader = IndexReader.open(dir, true);

View File

@@ -30,10 +30,10 @@ import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.index.TermVectorOffsetInfo;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
@@ -50,13 +50,13 @@ public class FieldTermStack {
LinkedList<TermInfo> termList = new LinkedList<TermInfo>();
public static void main( String[] args ) throws Exception {
Analyzer analyzer = new WhitespaceAnalyzer();
Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "f", analyzer );
Query query = parser.parse( "a x:b" );
FieldQuery fieldQuery = new FieldQuery( query, true, false );
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter( dir, analyzer, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add( new Field( "f", "a a a b b c a b b c d e f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
doc.add( new Field( "f", "b a b a f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );

View File

@@ -35,8 +35,9 @@ import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.PhraseQuery;
@@ -326,7 +327,9 @@ public abstract class AbstractTestCase extends LuceneTestCase {
// make 1 doc with multi valued field
protected void make1dmfIndex( Analyzer analyzer, String... values ) throws Exception {
IndexWriter writer = new IndexWriter( dir, analyzer, true, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer)
.setOpenMode(OpenMode.CREATE));
Document doc = new Document();
for( String value: values )
doc.add( new Field( F, value, Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
@@ -338,7 +341,9 @@ public abstract class AbstractTestCase extends LuceneTestCase {
// make 1 doc with multi valued & not analyzed field
protected void make1dmfIndexNA( String... values ) throws Exception {
IndexWriter writer = new IndexWriter( dir, analyzerK, true, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
analyzerK));
Document doc = new Document();
for( String value: values )
doc.add( new Field( F, value, Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );

View File

@@ -24,7 +24,8 @@ import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.Query;
public class SimpleFragmentsBuilderTest extends AbstractTestCase {
@@ -118,7 +119,9 @@ public class SimpleFragmentsBuilderTest extends AbstractTestCase {
}
protected void makeUnstoredIndex() throws Exception {
IndexWriter writer = new IndexWriter( dir, analyzerW, true, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
analyzerW));
Document doc = new Document();
doc.add( new Field( F, "aaa", Store.NO, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
writer.addDocument( doc );

View File

@@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
@@ -33,9 +32,9 @@ import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
@@ -59,7 +58,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox jumped";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
new IndexWriterConfig(TEST_VERSION_CURRENT));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -102,7 +101,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox jumped";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
new IndexWriterConfig(TEST_VERSION_CURRENT));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -171,7 +170,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
new IndexWriterConfig(TEST_VERSION_CURRENT));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -213,7 +212,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
new IndexWriterConfig(TEST_VERSION_CURRENT));
try {
final Document document = new Document();
document.add(new Field(FIELD, TEXT, Store.YES, Index.ANALYZED,
@@ -253,7 +252,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
new IndexWriterConfig(TEST_VERSION_CURRENT));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamSparse(),

View File

@@ -51,8 +51,9 @@ import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanQuery;
@@ -80,7 +81,6 @@ import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
@@ -89,8 +89,6 @@ import org.w3c.dom.NodeList;
*
*/
public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
// TODO: change to CURRENT, does not work because posIncr:
static final Version TEST_VERSION = TEST_VERSION_CURRENT;
private IndexReader reader;
static final String FIELD_NAME = "contents";
@@ -99,7 +97,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
RAMDirectory ramDir;
public IndexSearcher searcher = null;
int numHighlights = 0;
final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
TopDocs hits;
String[] texts = {
@@ -120,7 +118,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
public void testQueryScorerHits() throws Exception {
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
QueryParser qp = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
query = qp.parse("\"very long\"");
searcher = new IndexSearcher(ramDir, true);
TopDocs hits = searcher.search(query, 10);
@@ -150,7 +148,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
String s1 = "I call our world Flatland, not because we call it so,";
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
// Verify that a query against the default field results in text being
// highlighted
@@ -182,7 +180,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
*/
private static String highlightField(Query query, String fieldName, String text)
throws IOException, InvalidTokenOffsetsException {
TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION).tokenStream(fieldName, new StringReader(text));
TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION_CURRENT).tokenStream(fieldName, new StringReader(text));
// Assuming "<B>", "</B>" used to highlight
SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -228,7 +226,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
+ " OR " + f2c + ph2 + ")";
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
QueryParser qp = new QueryParser(TEST_VERSION, f1, analyzer);
QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
Query query = qp.parse(q);
QueryScorer scorer = new QueryScorer(query, f1);
@@ -678,7 +676,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// Need to explicitly set the QueryParser property to use TermRangeQuery
// rather
// than RangeFilters
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse(queryString);
doSearching(query);
@@ -1028,7 +1026,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
String srchkey = "football";
String s = "football-soccer in the euro 2004 footie competition";
QueryParser parser = new QueryParser(TEST_VERSION, "bookid", analyzer);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "bookid", analyzer);
Query query = parser.parse(srchkey);
TokenStream tokenStream = analyzer.tokenStream(null, new StringReader(s));
@@ -1154,13 +1152,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
sb.append(stopWords.iterator().next());
}
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION, stopWords).tokenStream(
Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords).tokenStream(
"data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
// new
// QueryTermScorer(query));
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(100);
match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
@@ -1171,7 +1169,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// + whitespace)
sb.append(" ");
sb.append(goodWord);
match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
}
@@ -1192,11 +1190,11 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
String text = "this is a text with searchterm in it";
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION,
Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION_CURRENT,
stopWords).tokenStream("text", new StringReader(text)), fm);
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(36);
String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "text", text);
String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "text", text);
assertTrue(
"Matched text should contain remainder of text after highlighted query ",
match.endsWith("in it"));
@@ -1213,9 +1211,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
numHighlights = 0;
// test to show how rewritten query can still be used
searcher = new IndexSearcher(ramDir, true);
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
Query query = parser.parse("JF? or Kenned*");
System.out.println("Searching with primitive query");
// forget to set this and...
@@ -1326,7 +1324,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
public void testMultiSearcher() throws Exception {
// setup index 1
RAMDirectory ramDir1 = new RAMDirectory();
IndexWriter writer1 = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer1 = new IndexWriter(ramDir1, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
Document d = new Document();
Field f = new Field(FIELD_NAME, "multiOne", Field.Store.YES, Field.Index.ANALYZED);
d.add(f);
@@ -1337,7 +1337,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// setup index 2
RAMDirectory ramDir2 = new RAMDirectory();
IndexWriter writer2 = new IndexWriter(ramDir2, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer2 = new IndexWriter(ramDir2, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
d = new Document();
f = new Field(FIELD_NAME, "multiTwo", Field.Store.YES, Field.Index.ANALYZED);
d.add(f);
@@ -1350,7 +1352,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
searchers[0] = new IndexSearcher(ramDir1, true);
searchers[1] = new IndexSearcher(ramDir2, true);
MultiSearcher multiSearcher = new MultiSearcher(searchers);
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse("multi*");
System.out.println("Searching for: " + query.toString(FIELD_NAME));
@@ -1384,7 +1386,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
@Override
public void run() throws Exception {
String docMainText = "fred is one of the people";
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
Query query = parser.parse("fred category:people");
// highlighting respects fieldnames used in query
@@ -1530,64 +1532,64 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
Highlighter highlighter;
String result;
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 <B>foo</B>", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed<B>10</B> foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi</B>-Speed10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-<B>Speed</B>10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
// ///////////////// same tests, just put the bigger overlapping token
// first
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 <B>foo</B>", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed<B>10</B> foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi</B>-Speed10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-<B>Speed</B>10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1613,7 +1615,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void makeIndex() throws IOException {
IndexWriter writer = new IndexWriter( dir, a, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1623,7 +1625,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
private void deleteDocument() throws IOException {
IndexWriter writer = new IndexWriter( dir, a, false, MaxFieldLength.LIMITED );
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep comment the following line
//writer.optimize();
@@ -1632,7 +1634,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
private void searchIndex() throws IOException, ParseException, InvalidTokenOffsetsException {
String q = "t_text1:random";
QueryParser parser = new QueryParser(TEST_VERSION, "t_text1", a );
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "t_text1", a );
Query query = parser.parse( q );
IndexSearcher searcher = new IndexSearcher( dir, true );
// This scorer can return negative idf -> null fragment
@@ -1686,7 +1688,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
public void doSearching(String queryString) throws Exception {
QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
parser.setEnablePositionIncrements(true);
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse(queryString);
@@ -1725,7 +1727,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
protected void setUp() throws Exception {
super.setUp();
ramDir = new RAMDirectory();
IndexWriter writer = new IndexWriter(ramDir, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
for (int i = 0; i < texts.length; i++) {
addDoc(writer, texts[i]);
}

View File

@@ -16,9 +16,12 @@
package org.apache.lucene.store.instantiated;
import junit.framework.TestCase;
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.IndexSearcher;
@@ -26,11 +29,9 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import java.util.Arrays;
import java.io.IOException;
public class TestEmptyIndex extends TestCase {
public class TestEmptyIndex extends LuceneTestCase {
public void testSearch() throws Exception {
@@ -60,7 +61,7 @@ public class TestEmptyIndex extends TestCase {
// make sure a Directory acts the same
Directory d = new RAMDirectory();
new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)).close();
r = IndexReader.open(d, false);
testNorms(r);
r.close();
@@ -93,7 +94,7 @@ public class TestEmptyIndex extends TestCase {
// make sure a Directory acts the same
Directory d = new RAMDirectory();
new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)).close();
r = IndexReader.open(d, false);
termEnumTest(r);
r.close();

View File

@@ -30,6 +30,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
@@ -60,7 +61,9 @@ public class TestIndicesEquals extends LuceneTestCase {
RAMDirectory dir = new RAMDirectory();
// create dir data
IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
for (int i = 0; i < 20; i++) {
Document document = new Document();
assembleDocument(document, i);
@@ -84,7 +87,9 @@ public class TestIndicesEquals extends LuceneTestCase {
InstantiatedIndex ii = new InstantiatedIndex();
// create dir data
IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
for (int i = 0; i < 500; i++) {
Document document = new Document();
assembleDocument(document, i);

View File

@@ -22,7 +22,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -35,7 +35,7 @@ public class TestSerialization extends LuceneTestCase {
Directory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("foo", "bar rab abr bra rba", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
doc.add(new Field("moo", "bar rab abr bra rba", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));

View File

@@ -18,10 +18,11 @@ package org.apache.lucene.store.instantiated;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -32,17 +33,17 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase {
public void test() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocument(iw, "Hello, world!");
addDocument(iw, "All work and no play makes jack a dull boy");
iw.close();
iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, tellus!");
addDocument(iw, "All work and no play makes danny a dull boy");
iw.close();
iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, earth!");
addDocument(iw, "All work and no play makes wendy a dull girl");
iw.close();

View File

@@ -42,9 +42,11 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.Collector;
@@ -169,7 +171,9 @@ class LuceneMethods {
public void optimize() throws IOException {
//open the index writer. False: don't create a new one
IndexWriter indexWriter = new IndexWriter(indexName, createAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter indexWriter = new IndexWriter(indexName, new IndexWriterConfig(
Version.LUCENE_CURRENT).setAnalyzer(createAnalyzer()).setOpenMode(
OpenMode.APPEND));
message("Starting to optimize index.");
long start = System.currentTimeMillis();
indexWriter.optimize();

View File

@@ -42,6 +42,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Collector;
@@ -410,7 +411,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = null;
try {
writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
writer.addDocument(doc);
writer.optimize();
return dir;

View File

@@ -21,11 +21,11 @@ import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.Version;
/**
* This tool splits input index into multiple equal parts. The method employed
@@ -88,8 +88,7 @@
}
}
}
IndexWriter w = new IndexWriter(outputs[i], new WhitespaceAnalyzer(),
true, MaxFieldLength.UNLIMITED);
IndexWriter w = new IndexWriter(outputs[i], new IndexWriterConfig(Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
System.err.println("Writing part " + (i + 1) + " ...");
w.addIndexes(new IndexReader[]{input});
w.close();

View File

@@ -17,9 +17,11 @@ package org.apache.lucene.misc;
*/
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import java.io.File;
import java.io.IOException;
@@ -36,7 +38,8 @@ public class IndexMergeTool {
}
FSDirectory mergedIndex = FSDirectory.open(new File(args[0]));
IndexWriter writer = new IndexWriter(mergedIndex, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(
Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
Directory[] indexes = new Directory[args.length - 1];
for (int i = 1; i < args.length; i++) {

View File

@@ -23,7 +23,6 @@ import java.util.Arrays;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
@@ -58,7 +57,9 @@ public class TestFieldNormModifier extends LuceneTestCase {
@Override
protected void setUp() throws Exception {
super.setUp();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
TEST_VERSION_CURRENT)));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();

View File

@@ -18,9 +18,8 @@ package org.apache.lucene.index;
import java.io.File;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@@ -35,7 +34,7 @@ public class TestIndexSplitter extends LuceneTestCase {
_TestUtil.rmDir(destDir);
destDir.mkdirs();
FSDirectory fsDir = FSDirectory.open(dir);
IndexWriter iw = new IndexWriter(fsDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(fsDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for (int x=0; x < 100; x++) {
Document doc = TestIndexWriterReader.createDocument(x, "index", 5);
iw.addDocument(doc);

View File

@@ -16,10 +16,8 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -32,8 +30,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
RAMDirectory dir = new RAMDirectory();
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc;
for (int i = 0; i < NUM_DOCS; i++) {
doc = new Document();

View File

@@ -28,7 +28,7 @@ public class TestTermVectorAccessor extends LuceneTestCase {
public void test() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet())));
Document doc;

View File

@@ -20,13 +20,11 @@ package org.apache.lucene.misc;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CachingWrapperFilter;
@@ -58,8 +56,7 @@ public class ChainedFilterTest extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer =
new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
Calendar cal = new GregorianCalendar();
cal.clear();
@@ -187,9 +184,7 @@ public class ChainedFilterTest extends LuceneTestCase {
public void testWithCachingFilter() throws Exception {
Directory dir = new RAMDirectory();
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.close();
Searcher searcher = new IndexSearcher(dir, true);

View File

@@ -19,14 +19,13 @@ package org.apache.lucene.misc;
import java.io.IOException;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldNormModifier;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
@@ -61,7 +60,7 @@ public class TestLengthNormModifier extends LuceneTestCase {
@Override
protected void setUp() throws Exception {
super.setUp();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();

View File

@@ -24,7 +24,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@@ -113,7 +113,7 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
RAMDirectory rd = new RAMDirectory();
IndexWriter w = new IndexWriter(rd, analyzer, MaxFieldLength.UNLIMITED);
IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
for (int i = 0; i < docsContent.length; i++) {
Document doc = new Document();
doc.add(new Field("name", docsContent[i].name, Field.Store.YES,

View File

@@ -19,11 +19,11 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -36,7 +36,7 @@ public class BooleanFilterTest extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
//Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
addDoc(writer, "admin guest", "010", "20040101","Y");

View File

@@ -25,6 +25,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.store.RAMDirectory;
@@ -41,7 +42,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
//Add series of docs with filterable fields : url, text and dates flags
addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");

View File

@@ -25,8 +25,8 @@ import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@ -39,7 +39,8 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, analyzer,true, MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer));
//Add series of docs with misspelt names
addDoc(writer, "jonathon smythe","1");

View File

@ -19,13 +19,12 @@ package org.apache.lucene.search;
import java.util.HashSet;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.OpenBitSet;
@ -54,9 +53,8 @@ public class TermsFilterTest extends LuceneTestCase {
{
String fieldName="field1";
RAMDirectory rd=new RAMDirectory();
IndexWriter w=new IndexWriter(rd,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),MaxFieldLength.UNLIMITED);
for (int i = 0; i < 100; i++)
{
IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 100; i++) {
Document doc=new Document();
int term=i*10; //terms are units of 10;
doc.add(new Field(fieldName,""+term,Field.Store.YES,Field.Index.NOT_ANALYZED));

View File

@ -28,7 +28,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
@ -45,8 +45,7 @@ public class TestMoreLikeThis extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT),
true, MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
// Add series of docs with specific information for MoreLikeThis
addDoc(writer, "lucene");

View File

@ -27,6 +27,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.core.QueryNodeException;
import org.apache.lucene.queryParser.standard.config.DefaultOperatorAttribute.Operator;
import org.apache.lucene.search.BooleanClause;
@ -319,8 +320,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
public void testStopWordSearching() throws Exception {
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
Directory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add(new Field("body", "blah the footest blah", Field.Store.NO,
Field.Index.ANALYZED));

View File

@ -40,7 +40,11 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
/**
* Tests multi field query parsing using the {@link MultiFieldQueryParserWrapper}.
* Tests multi field query parsing using the
* {@link MultiFieldQueryParserWrapper}.
*
* @deprecated this test exercises the deprecated MultiFieldQueryParserWrapper;
* when the latter is gone, this test should go with it.
*/
public class TestMultiFieldQueryParserWrapper extends LuceneTestCase {

View File

@ -51,6 +51,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.messages.MessageImpl;
import org.apache.lucene.queryParser.core.QueryNodeException;
@ -571,8 +572,7 @@ public class TestQPHelper extends LocalizedTestCase {
public void testFarsiRangeCollating() throws Exception {
RAMDirectory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,
Field.Index.NOT_ANALYZED));
@ -994,8 +994,7 @@ public class TestQPHelper extends LocalizedTestCase {
public void testLocalDateFormat() throws IOException, QueryNodeException {
RAMDirectory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
iw.close();
@ -1193,7 +1192,7 @@ public class TestQPHelper extends LocalizedTestCase {
public void testMultiPhraseQuery() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new CannedAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new CannedAnalyzer()));
Document doc = new Document();
doc.add(new Field("field", "", Field.Store.NO, Field.Index.ANALYZED));
w.addDocument(doc);

View File

@ -78,6 +78,9 @@ import org.apache.lucene.util.LocalizedTestCase;
* to use new {@link QueryParserWrapper} instead of the old query parser.
*
* Tests QueryParser.
*
* @deprecated this entire test case tests QueryParserWrapper, which is
* deprecated. When QPW is gone, this test will go with it.
*/
public class TestQueryParserWrapper extends LocalizedTestCase {

View File

@ -19,8 +19,8 @@ package org.apache.lucene.search.regex;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;
@ -40,8 +40,7 @@ public class TestRegexQuery extends LuceneTestCase {
super.setUp();
RAMDirectory directory = new RAMDirectory();
try {
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field(FN, "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);

View File

@ -19,13 +19,13 @@ package org.apache.lucene.search.regex;
import java.io.IOException;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.spans.SpanFirstQuery;
@ -44,7 +44,7 @@ public class TestSpanRegexQuery extends LuceneTestCase {
public void testSpanRegex() throws Exception {
RAMDirectory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
// doc.add(new Field("field", "the quick brown fox jumps over the lazy dog",
// Field.Store.NO, Field.Index.ANALYZED));
@ -109,15 +109,15 @@ public class TestSpanRegexQuery extends LuceneTestCase {
Field.Index.ANALYZED_NO_NORMS));
// creating first index writer
IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT),
true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writerA.addDocument(lDoc);
writerA.optimize();
writerA.close();
// creating second index writer
IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT),
true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writerB.addDocument(lDoc2);
writerB.optimize();
writerB.close();

View File

@ -27,6 +27,7 @@ import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
@ -57,8 +58,9 @@ public class TestRemoteCachingWrapperFilter extends LuceneTestCase {
private static void startServer() throws Exception {
// construct an index
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
doc.add(new Field("type", "A", Field.Store.YES, Field.Index.ANALYZED));

View File

@ -19,9 +19,9 @@ package org.apache.lucene.search;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
@ -58,7 +58,7 @@ public class TestRemoteSearchable extends LuceneTestCase {
private static void startServer() throws Exception {
// construct an index
RAMDirectory indexStore = new RAMDirectory();
IndexWriter writer = new IndexWriter(indexStore,new SimpleAnalyzer(TEST_VERSION_CURRENT),true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(indexStore,new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
doc.add(new Field("other", "other test text", Field.Store.YES, Field.Index.ANALYZED));

View File

@ -30,11 +30,12 @@ import junit.framework.Test;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@ -109,9 +110,9 @@ public class TestRemoteSort extends LuceneTestCase implements Serializable {
private Searcher getIndex (boolean even, boolean odd)
throws IOException {
RAMDirectory indexStore = new RAMDirectory ();
IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(1000);
IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(1000);
for (int i=0; i<data.length; ++i) {
if (((i%2)==0 && even) || ((i%2)==1 && odd)) {
Document doc = new Document();

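Note the chaining in the hunk above: each IndexWriterConfig setter returns the config itself, which is what lets the patch fold several settings into one expression (the same pattern appears later as setOpenMode(...).setMaxFieldLength(...)). A sketch of the fluent idiom, with indexStore as above:

    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT)
        .setAnalyzer(new SimpleAnalyzer(TEST_VERSION_CURRENT))
        .setMaxBufferedDocs(2);               // flush every two buffered docs
    IndexWriter writer = new IndexWriter(indexStore, conf);
    // Merge tuning stays on the policy, not the config:
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(1000);
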
View File

@ -21,11 +21,11 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -71,7 +71,7 @@ public class TestCartesian extends LuceneTestCase {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
setUpPlotter( 2, 15);

View File

@ -18,10 +18,10 @@ package org.apache.lucene.spatial.tier;
import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.QueryWrapperFilter;
@ -44,7 +44,7 @@ public class TestDistance extends LuceneTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
addData(writer);
}

View File

@ -20,12 +20,14 @@ package org.apache.lucene.search.spell;
import java.io.IOException;
import java.util.Iterator;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
@ -34,6 +36,7 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;
/**
* <p>
@ -139,8 +142,7 @@ public class SpellChecker implements java.io.Closeable {
synchronized (modifyCurrentIndexLock) {
ensureOpen();
if (!IndexReader.indexExists(spellIndexDir)) {
IndexWriter writer = new IndexWriter(spellIndexDir, null, true,
IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(spellIndexDir, new IndexWriterConfig(Version.LUCENE_CURRENT));
writer.close();
}
swapSearcher(spellIndexDir);
@ -353,7 +355,8 @@ public class SpellChecker implements java.io.Closeable {
synchronized (modifyCurrentIndexLock) {
ensureOpen();
final Directory dir = this.spellIndex;
final IndexWriter writer = new IndexWriter(dir, null, true, IndexWriter.MaxFieldLength.UNLIMITED);
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
writer.close();
swapSearcher(dir);
}
@ -388,10 +391,8 @@ public class SpellChecker implements java.io.Closeable {
synchronized (modifyCurrentIndexLock) {
ensureOpen();
final Directory dir = this.spellIndex;
final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMergeFactor(mergeFactor);
writer.setRAMBufferSizeMB(ramMB);
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT).setRAMBufferSizeMB(ramMB));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(mergeFactor);
Iterator<String> iter = dict.getWordsIterator();
while (iter.hasNext()) {

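This SpellChecker hunk shows the new split of responsibilities: buffering parameters such as the RAM budget stay on IndexWriterConfig, while merge tuning moves to the MergePolicy, reached through the writer. A sketch mirroring the patch (dir, ramMB and mergeFactor as above; the cast assumes the default LogByteSizeMergePolicy, a LogMergePolicy, is still installed):

    final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
        Version.LUCENE_CURRENT).setRAMBufferSizeMB(ramMB));
    // Merge tuning no longer lives on the writer itself:
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(mergeFactor);
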
View File

@ -20,11 +20,11 @@ package org.apache.lucene.search.spell;
import java.io.IOException;
import java.util.Iterator;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@ -46,7 +46,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
@Override
protected void setUp() throws Exception {
super.setUp();
IndexWriter writer = new IndexWriter(store, new WhitespaceAnalyzer(LuceneTestCase.TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc;

View File

@ -26,12 +26,12 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
@ -54,7 +54,7 @@ public class TestSpellChecker extends LuceneTestCase {
//create a user index
userindex = new RAMDirectory();
IndexWriter writer = new IndexWriter(userindex, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 1000; i++) {
Document doc = new Document();

View File

@ -19,11 +19,11 @@ package org.apache.lucene.queryParser.surround.query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
public class SingleFieldTestDb {
private Directory db;
@ -35,9 +35,7 @@ public class SingleFieldTestDb {
db = new RAMDirectory();
docs = documents;
fieldName = fName;
Analyzer analyzer = new WhitespaceAnalyzer();
IndexWriter writer = new IndexWriter(db, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(Version.LUCENE_CURRENT));
for (int j = 0; j < docs.length; j++) {
Document d = new Document();
d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));

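Note that the explicit WhitespaceAnalyzer disappears here, as it does at several other call sites in this patch; a bare new IndexWriterConfig(version) evidently defaults to it. A sketch of the assumed equivalence (an inference from the patch, not a documented guarantee):

    // Assumption: WhitespaceAnalyzer is IndexWriterConfig's default analyzer.
    IndexWriterConfig implicit = new IndexWriterConfig(Version.LUCENE_CURRENT);
    IndexWriterConfig explicit = new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_CURRENT));
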
View File

@ -31,6 +31,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
@ -87,7 +88,7 @@ public class ListSearcher extends AbstractListModel {
private ListDataListener listModelListener;
public ListSearcher(ListModel newModel) {
analyzer = new WhitespaceAnalyzer();
analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
setListModel(newModel);
listModelListener = new ListModelHandler();
newModel.addListDataListener(listModelListener);
@ -117,7 +118,7 @@ public class ListSearcher extends AbstractListModel {
try {
// recreate the RAMDirectory
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
// iterate through all rows
for (int row=0; row < listModel.getSize(); row++){

View File

@ -29,6 +29,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -115,7 +116,7 @@ public class TableSearcher extends AbstractTableModel {
* @param tableModel The table model to decorate
*/
public TableSearcher(TableModel tableModel) {
analyzer = new WhitespaceAnalyzer();
analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
tableModelListener = new TableModelHandler();
setTableModel(tableModel);
tableModel.addTableModelListener(tableModelListener);
@ -163,7 +164,7 @@ public class TableSearcher extends AbstractTableModel {
try {
// recreate the RAMDirectory
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
// iterate through all rows
for (int row=0; row < tableModel.getRowCount(); row++){

View File

@ -35,6 +35,9 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@ -245,8 +248,10 @@ public class Syns2Index
try {
// override the specific index if it already exists
IndexWriter writer = new IndexWriter(dir, ana, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(true); // why?
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(ana));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true); // why?
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true); // why?
Iterator<String> i1 = word2Nums.keySet().iterator();
while (i1.hasNext()) // for each word
{

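The compound-file flags follow the same route out of IndexWriter: per the deprecation notes in this patch, setUseCompoundFile(boolean) and setUseCompoundDocStore(boolean) are now called on the LogMergePolicy directly, and the old writer method set both at once. A sketch:

    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
    lmp.setUseCompoundFile(true);     // merged segments use the compound format
    lmp.setUseCompoundDocStore(true); // flushed doc stores do too
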
View File

@ -12,12 +12,14 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -63,7 +65,7 @@ public class TestParser extends TestCase {
{
BufferedReader d = new BufferedReader(new InputStreamReader(TestParser.class.getResourceAsStream("reuters21578.txt")));
dir=new RAMDirectory();
IndexWriter writer=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_24).setAnalyzer(analyzer));
String line = d.readLine();
while(line!=null)
{

View File

@ -11,6 +11,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;
@ -141,7 +142,7 @@ public class TestQueryTemplateManager extends LuceneTestCase {
//Create an index
RAMDirectory dir=new RAMDirectory();
IndexWriter w=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
for (int i = 0; i < docFieldValues.length; i++)
{
w.addDocument(getDocumentFromString(docFieldValues[i]));

View File

@ -19,6 +19,8 @@ package org.apache.lucene.demo;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@ -55,7 +57,10 @@ public class IndexFiles {
Date start = new Date();
try {
IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR), new StandardAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR),
new IndexWriterConfig(Version.LUCENE_CURRENT).setOpenMode(
OpenMode.CREATE).setAnalyzer(
new StandardAnalyzer(Version.LUCENE_CURRENT)));
System.out.println("Indexing to directory '" +INDEX_DIR+ "'...");
indexDocs(writer, docDir);
System.out.println("Optimizing...");

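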
View File

@ -21,8 +21,10 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@ -77,8 +79,9 @@ public class IndexHTML {
deleting = true;
indexDocs(root, index, create);
}
writer = new IndexWriter(FSDirectory.open(index), new StandardAnalyzer(Version.LUCENE_CURRENT), create,
new IndexWriter.MaxFieldLength(1000000));
writer = new IndexWriter(FSDirectory.open(index), new IndexWriterConfig(
Version.LUCENE_CURRENT).setAnalyzer(new StandardAnalyzer(
Version.LUCENE_CURRENT)).setMaxFieldLength(1000000).setOpenMode(create ? OpenMode.CREATE : OpenMode.CREATE_OR_APPEND));
indexDocs(root, index, create); // add new docs
System.out.println("Optimizing index...");

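The old boolean create constructor argument maps onto the new OpenMode enum, as this IndexHTML change shows; CREATE_OR_APPEND covers the old "create only if missing" probing. A sketch of the mapping, assuming a caller-supplied create flag:

    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setOpenMode(create ? OpenMode.CREATE : OpenMode.CREATE_OR_APPEND);
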
View File

@ -742,7 +742,7 @@ class DirectoryReader extends IndexReader implements Cloneable {
if (writeLock == null) {
Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
if (!writeLock.obtain(IndexWriter.WRITE_LOCK_TIMEOUT)) // obtain write lock
if (!writeLock.obtain(IndexWriterConfig.WRITE_LOCK_TIMEOUT)) // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
this.writeLock = writeLock;

View File

@ -138,7 +138,7 @@ final class DocumentsWriter {
private DocFieldProcessor docFieldProcessor;
PrintStream infoStream;
int maxFieldLength = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
int maxFieldLength = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
Similarity similarity;
List<String> newFiles;
@ -223,7 +223,7 @@ final class DocumentsWriter {
abstract DocConsumer getChain(DocumentsWriter documentsWriter);
}
static final IndexingChain DefaultIndexingChain = new IndexingChain() {
static final IndexingChain defaultIndexingChain = new IndexingChain() {
@Override
DocConsumer getChain(DocumentsWriter documentsWriter) {
@ -270,22 +270,22 @@ final class DocumentsWriter {
// The max number of delete terms that can be buffered before
// they must be flushed to disk.
private int maxBufferedDeleteTerms = IndexWriter.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
private int maxBufferedDeleteTerms = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
// How much RAM we can use before flushing. This is 0 if
// we are flushing by doc count instead.
private long ramBufferSize = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
private long ramBufferSize = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
private long waitQueuePauseBytes = (long) (ramBufferSize*0.1);
private long waitQueueResumeBytes = (long) (ramBufferSize*0.05);
// If we've allocated 5% over our RAM budget, we then
// free down to 95%
private long freeTrigger = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
private long freeLevel = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);
private long freeTrigger = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
private long freeLevel = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);
// Flush @ this number of docs. If ramBufferSize is
// non-zero we will flush by RAM usage instead.
private int maxBufferedDocs = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;
private int maxBufferedDocs = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
private int flushedDocCount; // How many docs already flushed to index
@ -304,7 +304,7 @@ final class DocumentsWriter {
DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain) throws IOException {
this.directory = directory;
this.writer = writer;
this.similarity = writer.getSimilarity();
this.similarity = writer.getConfig().getSimilarity();
flushedDocCount = writer.maxDoc();
consumer = indexingChain.getChain(this);
@ -342,8 +342,8 @@ final class DocumentsWriter {
/** Set how much RAM we can use before flushing. */
synchronized void setRAMBufferSizeMB(double mb) {
if (mb == IndexWriter.DISABLE_AUTO_FLUSH) {
ramBufferSize = IndexWriter.DISABLE_AUTO_FLUSH;
if (mb == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
ramBufferSize = IndexWriterConfig.DISABLE_AUTO_FLUSH;
waitQueuePauseBytes = 4*1024*1024;
waitQueueResumeBytes = 2*1024*1024;
} else {
@ -356,7 +356,7 @@ final class DocumentsWriter {
}
synchronized double getRAMBufferSizeMB() {
if (ramBufferSize == IndexWriter.DISABLE_AUTO_FLUSH) {
if (ramBufferSize == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
return ramBufferSize;
} else {
return ramBufferSize/1024./1024.;
@ -587,7 +587,7 @@ final class DocumentsWriter {
synchronized private void initFlushState(boolean onlyDocStore) {
initSegmentName(onlyDocStore);
flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getTermIndexInterval());
flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval());
}
/** Flush all pending docs to a new segment */
@ -766,7 +766,7 @@ final class DocumentsWriter {
// always get N docs when we flush by doc count, even if
// > 1 thread is adding documents:
if (!flushPending &&
maxBufferedDocs != IndexWriter.DISABLE_AUTO_FLUSH
maxBufferedDocs != IndexWriterConfig.DISABLE_AUTO_FLUSH
&& numDocsInRAM >= maxBufferedDocs) {
flushPending = true;
state.doFlushAfter = true;
@ -928,9 +928,9 @@ final class DocumentsWriter {
}
synchronized boolean deletesFull() {
return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
(deletesInRAM.bytesUsed + deletesFlushed.bytesUsed + numBytesUsed) >= ramBufferSize) ||
(maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
(maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
}
@ -943,9 +943,9 @@ final class DocumentsWriter {
// too-frequent flushing of a long tail of tiny segments
// when merges (which always apply deletes) are
// infrequent.
return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
(deletesInRAM.bytesUsed + deletesFlushed.bytesUsed) >= ramBufferSize/2) ||
(maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
(maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
}
@ -1115,7 +1115,7 @@ final class DocumentsWriter {
}
synchronized boolean doBalanceRAM() {
return ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
return ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
}
/** Does the synchronized work to finish/flush the

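DocumentsWriter now reads every flush-trigger default from IndexWriterConfig rather than IndexWriter, but the interplay is unchanged: a trigger set to DISABLE_AUTO_FLUSH is ignored, and RAM-based flushing at DEFAULT_RAM_BUFFER_SIZE_MB remains the default. A sketch of flushing by document count only, assuming the config setter accepts DISABLE_AUTO_FLUSH just as the deprecated writer setter did:

    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31)
        .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) // disable RAM trigger
        .setMaxBufferedDocs(1000);                                // flush every 1000 docs
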
View File

@ -19,7 +19,7 @@ package org.apache.lucene.index;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DocumentsWriter.IndexingChain;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
@ -29,6 +29,7 @@ import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.Version;
import java.io.IOException;
import java.io.Closeable;
@ -179,10 +180,11 @@ public class IndexWriter implements Closeable {
/**
* Default value for the write lock timeout (1,000).
* @see #setDefaultWriteLockTimeout
* @deprecated use {@link IndexWriterConfig#WRITE_LOCK_TIMEOUT} instead
*/
public static long WRITE_LOCK_TIMEOUT = 1000;
public static long WRITE_LOCK_TIMEOUT = IndexWriterConfig.WRITE_LOCK_TIMEOUT;
private long writeLockTimeout = WRITE_LOCK_TIMEOUT;
private long writeLockTimeout;
/**
* Name of the write lock in the index.
@ -191,36 +193,43 @@ public class IndexWriter implements Closeable {
/**
* Value to denote a flush trigger is disabled
* @deprecated use {@link IndexWriterConfig#DISABLE_AUTO_FLUSH} instead
*/
public final static int DISABLE_AUTO_FLUSH = -1;
public final static int DISABLE_AUTO_FLUSH = IndexWriterConfig.DISABLE_AUTO_FLUSH;
/**
* Disabled by default (because IndexWriter flushes by RAM usage
* by default). Change using {@link #setMaxBufferedDocs(int)}.
* @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DOCS} instead.
*/
public final static int DEFAULT_MAX_BUFFERED_DOCS = DISABLE_AUTO_FLUSH;
public final static int DEFAULT_MAX_BUFFERED_DOCS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
/**
* Default value is 16 MB (which means flush when buffered
* docs consume 16 MB RAM). Change using {@link #setRAMBufferSizeMB}.
* @deprecated use {@link IndexWriterConfig#DEFAULT_RAM_BUFFER_SIZE_MB} instead.
*/
public final static double DEFAULT_RAM_BUFFER_SIZE_MB = 16.0;
public final static double DEFAULT_RAM_BUFFER_SIZE_MB = IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
/**
* Disabled by default (because IndexWriter flushes by RAM usage
* by default). Change using {@link #setMaxBufferedDeleteTerms(int)}.
* @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DELETE_TERMS} instead
*/
public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;
public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
/**
* Default value is 10,000. Change using {@link #setMaxFieldLength(int)}.
*
* @deprecated see {@link IndexWriterConfig}
*/
public final static int DEFAULT_MAX_FIELD_LENGTH = 10000;
/**
* Default value is 128. Change using {@link #setTermIndexInterval(int)}.
* @deprecated use {@link IndexWriterConfig#DEFAULT_TERM_INDEX_INTERVAL} instead.
*/
public final static int DEFAULT_TERM_INDEX_INTERVAL = 128;
public final static int DEFAULT_TERM_INDEX_INTERVAL = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
/**
* Absolute hard maximum length for a term. If a term
@ -244,10 +253,11 @@ public class IndexWriter implements Closeable {
private int messageID = -1;
volatile private boolean hitOOM;
private Directory directory; // where this index resides
private Analyzer analyzer; // how to analyze text
private final Directory directory; // where this index resides
private final Analyzer analyzer; // how to analyze text
private Similarity similarity = Similarity.getDefault(); // how to normalize
// TODO 4.0: this should be made final once the setter is out
private /*final*/Similarity similarity = Similarity.getDefault(); // how to normalize
private volatile long changeCount; // increments every time a change is completed
private long lastCommitChangeCount; // last changeCount that was committed
@ -270,7 +280,8 @@ public class IndexWriter implements Closeable {
private Lock writeLock;
private int termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;
// TODO 4.0: this should be made final once the setter is out
private /*final*/int termIndexInterval;
private boolean closed;
private boolean closing;
@ -280,7 +291,8 @@ public class IndexWriter implements Closeable {
private HashSet<SegmentInfo> mergingSegments = new HashSet<SegmentInfo>();
private MergePolicy mergePolicy = new LogByteSizeMergePolicy(this);
private MergeScheduler mergeScheduler = new ConcurrentMergeScheduler();
// TODO 4.0: this should be made final once the setter is removed
private /*final*/MergeScheduler mergeScheduler;
private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<MergePolicy.OneMerge>();
private Set<MergePolicy.OneMerge> runningMerges = new HashSet<MergePolicy.OneMerge>();
private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
@ -307,7 +319,11 @@ public class IndexWriter implements Closeable {
// deletes, doing merges, and reopening near real-time
// readers.
private volatile boolean poolReaders;
// The instance that was passed to the constructor. It is saved only in order
// to allow users to query an IndexWriter's settings.
private final IndexWriterConfig config;
/**
* Expert: returns a readonly reader, covering all
* committed as well as un-committed changes to the index.
@ -777,19 +793,29 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* @see #setUseCompoundFile(boolean)
* @deprecated use {@link LogMergePolicy#getUseCompoundDocStore()} and
* {@link LogMergePolicy#getUseCompoundFile()} directly.
*/
public boolean getUseCompoundFile() {
return getLogMergePolicy().getUseCompoundFile();
}
/** <p>Setting to turn on usage of a compound file. When on,
* multiple files for each segment are merged into a
* single file when a new segment is flushed.</p>
*
* <p>Note that this method is a convenience method: it
* just calls mergePolicy.setUseCompoundFile as long as
* mergePolicy is an instance of {@link LogMergePolicy}.
* Otherwise an IllegalArgumentException is thrown.</p>
/**
* <p>
* Setting to turn on usage of a compound file. When on, multiple files for
* each segment are merged into a single file when a new segment is flushed.
* </p>
*
* <p>
* Note that this method is a convenience method: it just calls
* mergePolicy.setUseCompoundFile as long as mergePolicy is an instance of
* {@link LogMergePolicy}. Otherwise an IllegalArgumentException is thrown.
* </p>
*
* @deprecated use {@link LogMergePolicy#setUseCompoundDocStore(boolean)} and
* {@link LogMergePolicy#setUseCompoundFile(boolean)} directly.
* Note that this method sets the given value on both; therefore
* you should consider doing the same.
*/
public void setUseCompoundFile(boolean value) {
getLogMergePolicy().setUseCompoundFile(value);
@ -799,20 +825,25 @@ public class IndexWriter implements Closeable {
/** Expert: Set the Similarity implementation used by this IndexWriter.
*
* @see Similarity#setDefault(Similarity)
* @deprecated use {@link IndexWriterConfig#setSimilarity(Similarity)} instead
*/
public void setSimilarity(Similarity similarity) {
ensureOpen();
this.similarity = similarity;
docWriter.setSimilarity(similarity);
// Required so config.getSimilarity returns the right value. But this will
// go away together with the method in 4.0.
config.setSimilarity(similarity);
}
/** Expert: Return the Similarity implementation used by this IndexWriter.
*
* <p>This defaults to the current value of {@link Similarity#getDefault()}.
* @deprecated use {@link IndexWriterConfig#getSimilarity()} instead
*/
public Similarity getSimilarity() {
ensureOpen();
return this.similarity;
return similarity;
}
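// A sketch of the replacement idiom for the two deprecated methods above
// (DefaultSimilarity stands in for any Similarity implementation):
//
//   IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31)
//       .setSimilarity(new DefaultSimilarity());
//   IndexWriter writer = new IndexWriter(dir, conf);
//   Similarity sim = writer.getConfig().getSimilarity();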
/** Expert: Set the interval between indexed terms. Large values cause less
@ -835,15 +866,20 @@ public class IndexWriter implements Closeable {
* must be scanned for each random term access.
*
* @see #DEFAULT_TERM_INDEX_INTERVAL
* @deprecated use {@link IndexWriterConfig#setTermIndexInterval(int)}
*/
public void setTermIndexInterval(int interval) {
ensureOpen();
this.termIndexInterval = interval;
// Required so config.getTermIndexInterval returns the right value. But this will
// go away together with the method in 4.0.
config.setTermIndexInterval(interval);
}
/** Expert: Return the interval between indexed terms.
*
* @see #setTermIndexInterval(int)
* @deprecated use {@link IndexWriterConfig#getTermIndexInterval()}
*/
public int getTermIndexInterval() {
// We pass false because this method is called by SegmentMerger while we are in the process of closing
@ -872,10 +908,13 @@ public class IndexWriter implements Closeable {
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, boolean create, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, create, null, mfl.getLimit(), null, null);
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
mfl.getLimit()));
}
/**
@ -895,10 +934,12 @@ public class IndexWriter implements Closeable {
* @throws IOException if the directory cannot be
* read/written to or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, null, mfl.getLimit(), null, null);
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
.setMaxFieldLength(mfl.getLimit()));
}
/**
@ -918,10 +959,13 @@ public class IndexWriter implements Closeable {
* @throws IOException if the directory cannot be
* read/written to or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, deletionPolicy, mfl.getLimit(), null, null);
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
.setMaxFieldLength(mfl.getLimit()).setIndexDeletionPolicy(
deletionPolicy));
}
/**
@ -947,43 +991,13 @@ public class IndexWriter implements Closeable {
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, create, deletionPolicy, mfl.getLimit(), null, null);
}
/**
* Expert: constructs an IndexWriter with a custom {@link
* IndexDeletionPolicy} and {@link IndexingChain},
* for the index in <code>d</code>.
* Text will be analyzed with <code>a</code>. If
* <code>create</code> is true, then a new, empty index
* will be created in <code>d</code>, replacing the index
* already there, if any.
*
* @param d the index directory
* @param a the analyzer to use
* @param create <code>true</code> to create the index or overwrite
* the existing one; <code>false</code> to append to the existing
* index
* @param deletionPolicy see <a href="#deletionPolicy">above</a>
* @param mfl whether or not to limit field lengths, value is in number of terms/tokens. See {@link org.apache.lucene.index.IndexWriter.MaxFieldLength}.
* @param indexingChain the {@link DocConsumer} chain to be used to
* process documents
* @param commit which commit to open
* @throws CorruptIndexException if the index is corrupt
* @throws LockObtainFailedException if another writer
* has this index open (<code>write.lock</code> could not
* be obtained)
* @throws IOException if the directory cannot be read/written to, or
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
*/
IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, create, deletionPolicy, mfl.getLimit(), indexingChain, commit);
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
mfl.getLimit()).setIndexDeletionPolicy(deletionPolicy));
}
/**
@ -1017,44 +1031,74 @@ public class IndexWriter implements Closeable {
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, false, deletionPolicy, mfl.getLimit(), null, commit);
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
.setOpenMode(OpenMode.APPEND).setMaxFieldLength(mfl.getLimit())
.setIndexDeletionPolicy(deletionPolicy).setIndexCommit(commit));
}
private void init(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy,
int maxFieldLength, IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
if (IndexReader.indexExists(d)) {
init(d, a, false, deletionPolicy, maxFieldLength, indexingChain, commit);
} else {
init(d, a, true, deletionPolicy, maxFieldLength, indexingChain, commit);
}
}
private void init(Directory d, Analyzer a, final boolean create,
IndexDeletionPolicy deletionPolicy, int maxFieldLength,
IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
/**
* Constructs a new IndexWriter per the settings given in <code>conf</code>.
* Note that the passed-in {@link IndexWriterConfig} is cloned, so making
* changes to it after IndexWriter has been instantiated will not affect the
* writer. Likewise, calling {@link #getConfig()} and changing the returned
* instance's parameters does not affect this IndexWriter instance.
* <p>
* <b>NOTE:</b> by default, {@link IndexWriterConfig#getMaxFieldLength()}
* returns {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH}. Pay attention to
* whether this setting fits your application.
*
* @param d
* the index directory. The index is either created or appended
* according to <code>conf.getOpenMode()</code>.
* @param conf
* the configuration settings according to which IndexWriter should
* be initialized.
* @throws CorruptIndexException
* if the index is corrupt
* @throws LockObtainFailedException
* if another writer has this index open (<code>write.lock</code>
* could not be obtained)
* @throws IOException
* if the directory cannot be read/written to, or if it does not
* exist and <code>conf.getOpenMode()</code> is
* <code>OpenMode.APPEND</code> or if there is any other low-level
* IO error
*/
public IndexWriter(Directory d, IndexWriterConfig conf)
throws CorruptIndexException, LockObtainFailedException, IOException {
config = (IndexWriterConfig) conf.clone();
directory = d;
analyzer = a;
analyzer = conf.getAnalyzer();
setMessageID(defaultInfoStream);
this.maxFieldLength = maxFieldLength;
maxFieldLength = conf.getMaxFieldLength();
termIndexInterval = conf.getTermIndexInterval();
writeLockTimeout = conf.getWriteLockTimeout();
similarity = conf.getSimilarity();
mergeScheduler = conf.getMergeScheduler();
if (indexingChain == null)
indexingChain = DocumentsWriter.DefaultIndexingChain;
OpenMode mode = conf.getOpenMode();
boolean create;
if (mode == OpenMode.CREATE) {
create = true;
} else if (mode == OpenMode.APPEND) {
create = false;
} else {
// CREATE_OR_APPEND - create only if an index does not exist
create = !IndexReader.indexExists(directory);
}
if (create) {
// Clear the write lock in case it's leftover:
directory.clearLock(WRITE_LOCK_NAME);
}
Lock writeLock = directory.makeLock(WRITE_LOCK_NAME);
writeLock = directory.makeLock(WRITE_LOCK_NAME);
if (!writeLock.obtain(writeLockTimeout)) // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
this.writeLock = writeLock; // save it
try {
if (create) {
@ -1085,6 +1129,7 @@ public class IndexWriter implements Closeable {
} else {
segmentInfos.read(directory);
IndexCommit commit = conf.getIndexCommit();
if (commit != null) {
// Swap out all segments, but, keep metadata in
// SegmentInfos, like version & generation, to
@ -1108,14 +1153,14 @@ public class IndexWriter implements Closeable {
setRollbackSegmentInfos(segmentInfos);
docWriter = new DocumentsWriter(directory, this, indexingChain);
docWriter = new DocumentsWriter(directory, this, conf.getIndexingChain());
docWriter.setInfoStream(infoStream);
docWriter.setMaxFieldLength(maxFieldLength);
// Default deleter (for backwards compatibility) is
// KeepOnlyLastCommitDeleter:
deleter = new IndexFileDeleter(directory,
deletionPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : deletionPolicy,
conf.getIndexDeletionPolicy(),
segmentInfos, infoStream, docWriter);
if (deleter.startingCommitDeleted)
@ -1125,20 +1170,22 @@ public class IndexWriter implements Closeable {
// segments_N file.
changeCount++;
docWriter.setMaxBufferedDeleteTerms(conf.getMaxBufferedDeleteTerms());
docWriter.setRAMBufferSizeMB(conf.getRAMBufferSizeMB());
docWriter.setMaxBufferedDocs(conf.getMaxBufferedDocs());
pushMaxBufferedDocs();
if (infoStream != null) {
message("init: create=" + create);
messageState();
}
} catch (IOException e) {
this.writeLock.release();
this.writeLock = null;
writeLock.release();
writeLock = null;
throw e;
}
}
private synchronized void setRollbackSegmentInfos(SegmentInfos infos) {
rollbackSegmentInfos = (SegmentInfos) infos.clone();
assert !rollbackSegmentInfos.hasExternalSegments(directory);
@ -1148,6 +1195,19 @@ public class IndexWriter implements Closeable {
rollbackSegments.put(rollbackSegmentInfos.info(i), Integer.valueOf(i));
}
/**
* Returns the {@link IndexWriterConfig} that was passed to
* {@link #IndexWriter(Directory, IndexWriterConfig)}. This allows querying
* IndexWriter's settings.
* <p>
* <b>NOTE:</b> setting any parameter on the returned instance has no effect
* on the IndexWriter instance. If you need to change those settings after
* IndexWriter has been created, you need to instantiate a new IndexWriter.
*/
public IndexWriterConfig getConfig() {
return config;
}
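// For example, a sketch of querying live settings through the snapshot
// returned by getConfig() (getter names per this patch's IndexWriterConfig):
//
//   double ramMB   = writer.getConfig().getRAMBufferSizeMB();
//   int    interval = writer.getConfig().getTermIndexInterval();
//   long   timeout  = writer.getConfig().getWriteLockTimeout();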
/**
* Expert: set the merge policy used by this writer.
*/
@ -1175,6 +1235,7 @@ public class IndexWriter implements Closeable {
/**
* Expert: set the merge scheduler used by this writer.
* @deprecated use {@link IndexWriterConfig#setMergeScheduler(MergeScheduler)} instead
*/
synchronized public void setMergeScheduler(MergeScheduler mergeScheduler) throws CorruptIndexException, IOException {
ensureOpen();
@ -1188,12 +1249,16 @@ public class IndexWriter implements Closeable {
this.mergeScheduler = mergeScheduler;
if (infoStream != null)
message("setMergeScheduler " + mergeScheduler);
// Required so config.getMergeScheduler returns the right value. But this will
// go away together with the method in 4.0.
config.setMergeScheduler(mergeScheduler);
}
/**
* Expert: returns the current MergePolicy in use by this
* Expert: returns the current MergeScheduler in use by this
* writer.
* @see #setMergePolicy
* @see #setMergeScheduler(MergeScheduler)
* @deprecated use {@link IndexWriterConfig#getMergeScheduler()} instead
*/
public MergeScheduler getMergeScheduler() {
ensureOpen();
@ -1219,6 +1284,7 @@ public class IndexWriter implements Closeable {
* LogByteSizeMergePolicy}) also allows you to set this
* limit by net size (in MB) of the segment, using {@link
* LogByteSizeMergePolicy#setMaxMergeMB}.</p>
* @deprecated use {@link LogMergePolicy#setMaxMergeDocs(int)} directly.
*/
public void setMaxMergeDocs(int maxMergeDocs) {
getLogMergePolicy().setMaxMergeDocs(maxMergeDocs);
@ -1234,6 +1300,7 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* @see #setMaxMergeDocs
* @deprecated use {@link LogMergePolicy#getMaxMergeDocs()} directly.
*/
public int getMaxMergeDocs() {
return getLogMergePolicy().getMaxMergeDocs();
@ -1252,6 +1319,7 @@ public class IndexWriter implements Closeable {
* is your memory, but you should anticipate an OutOfMemoryError.<p/>
* By default, no more than {@link #DEFAULT_MAX_FIELD_LENGTH} terms
* will be indexed for a field.
* @deprecated use {@link IndexWriterConfig#setMaxFieldLength(int)} instead
*/
public void setMaxFieldLength(int maxFieldLength) {
ensureOpen();
@ -1259,12 +1327,16 @@ public class IndexWriter implements Closeable {
docWriter.setMaxFieldLength(maxFieldLength);
if (infoStream != null)
message("setMaxFieldLength " + maxFieldLength);
// Required so config.getSimilarity returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxFieldLength(maxFieldLength);
}
/**
* Returns the maximum number of terms that will be
* indexed for a single field in a document.
* @see #setMaxFieldLength
* @deprecated use {@link IndexWriterConfig#getMaxFieldLength()} instead
*/
public int getMaxFieldLength() {
ensureOpen();
@ -1289,6 +1361,7 @@ public class IndexWriter implements Closeable {
* enabled but smaller than 2, or it disables maxBufferedDocs
* when ramBufferSize is already disabled
* @see #setRAMBufferSizeMB
* @deprecated use {@link IndexWriterConfig#setMaxBufferedDocs(int)} instead.
*/
public void setMaxBufferedDocs(int maxBufferedDocs) {
ensureOpen();
@ -1303,6 +1376,9 @@ public class IndexWriter implements Closeable {
pushMaxBufferedDocs();
if (infoStream != null)
message("setMaxBufferedDocs " + maxBufferedDocs);
// Required so config.getMaxBufferedDocs returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxBufferedDocs(maxBufferedDocs);
}
/**
@ -1329,6 +1405,7 @@ public class IndexWriter implements Closeable {
* Returns the number of buffered added documents that will
* trigger a flush if enabled.
* @see #setMaxBufferedDocs
* @deprecated use {@link IndexWriterConfig#getMaxBufferedDocs()} instead.
*/
public int getMaxBufferedDocs() {
ensureOpen();
@ -1372,6 +1449,7 @@ public class IndexWriter implements Closeable {
* @throws IllegalArgumentException if ramBufferSize is
* enabled but non-positive, or it disables ramBufferSize
* when maxBufferedDocs is already disabled
* @deprecated use {@link IndexWriterConfig#setRAMBufferSizeMB(double)} instead.
*/
public void setRAMBufferSizeMB(double mb) {
if (mb > 2048.0) {
@ -1386,10 +1464,14 @@ public class IndexWriter implements Closeable {
docWriter.setRAMBufferSizeMB(mb);
if (infoStream != null)
message("setRAMBufferSizeMB " + mb);
// Required so config.getRAMBufferSizeMB returns the right value. But this will
// go away together with the method in 4.0.
config.setRAMBufferSizeMB(mb);
}
/**
* Returns the value set by {@link #setRAMBufferSizeMB} if enabled.
* @deprecated use {@link IndexWriterConfig#getRAMBufferSizeMB()} instead.
*/
public double getRAMBufferSizeMB() {
return docWriter.getRAMBufferSizeMB();
@ -1406,6 +1488,7 @@ public class IndexWriter implements Closeable {
* @throws IllegalArgumentException if maxBufferedDeleteTerms
* is enabled but smaller than 1
* @see #setRAMBufferSizeMB
* @deprecated use {@link IndexWriterConfig#setMaxBufferedDeleteTerms(int)} instead.
*/
public void setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
ensureOpen();
@ -1416,12 +1499,16 @@ public class IndexWriter implements Closeable {
docWriter.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
if (infoStream != null)
message("setMaxBufferedDeleteTerms " + maxBufferedDeleteTerms);
// Required so config.getMaxBufferedDeleteTerms returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
}
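All three flush triggers (buffered docs, RAM usage, buffered delete terms) are now configured up front on IndexWriterConfig. A sketch of flushing by counts only, with RAM-based flushing disabled; flushByDocCount is a hypothetical helper and the thresholds are arbitrary:

    static IndexWriter flushByDocCount(Directory dir) throws IOException {
      IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT)
          .setMaxBufferedDocs(1000)         // flush every 1000 added documents
          .setMaxBufferedDeleteTerms(100)   // or every 100 buffered delete terms
          .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
      return new IndexWriter(dir, conf);
    }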
/**
* Returns the number of buffered deleted terms that will
* trigger a flush if enabled.
* @see #setMaxBufferedDeleteTerms
* @deprecated use {@link IndexWriterConfig#getMaxBufferedDeleteTerms()} instead
*/
public int getMaxBufferedDeleteTerms() {
ensureOpen();
@ -1442,6 +1529,7 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* <p>This must never be less than 2. The default value is 10.
* @deprecated use {@link LogMergePolicy#setMergeFactor(int)} directly.
*/
public void setMergeFactor(int mergeFactor) {
getLogMergePolicy().setMergeFactor(mergeFactor);
@ -1458,6 +1546,7 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* @see #setMergeFactor
* @deprecated use {@link LogMergePolicy#getMergeFactor()} directly.
*/
public int getMergeFactor() {
return getLogMergePolicy().getMergeFactor();
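Since the merge factor now also lives on the policy, tuning it looks like the sketch below; tuneMergeFactor is a hypothetical helper, and the cast assumes one of the default LogMergePolicy subclasses is in use:

    static void tuneMergeFactor(IndexWriter writer) {
      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
      lmp.setMergeFactor(4); // must be >= 2; the default is 10
    }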
@ -1494,15 +1583,11 @@ public class IndexWriter implements Closeable {
}
private void messageState() {
message("setInfoStream: dir=" + directory +
" mergePolicy=" + mergePolicy +
" mergeScheduler=" + mergeScheduler +
" ramBufferSizeMB=" + docWriter.getRAMBufferSizeMB() +
" maxBufferedDocs=" + docWriter.getMaxBufferedDocs() +
" maxBuffereDeleteTerms=" + docWriter.getMaxBufferedDeleteTerms() +
" maxFieldLength=" + maxFieldLength +
" index=" + segString() +
" version=" + Constants.LUCENE_VERSION);
message("\ndir=" + directory + "\n" +
"mergePolicy=" + mergePolicy + "\n" +
"index=" + segString() + "\n" +
"version=" + Constants.LUCENE_VERSION + "\n" +
config.toString());
}
/**
@ -1522,15 +1607,20 @@ public class IndexWriter implements Closeable {
/**
* Sets the maximum time to wait for a write lock (in milliseconds) for this instance of IndexWriter.
* @see #setDefaultWriteLockTimeout to change the default value for all instances of IndexWriter.
* @deprecated use {@link IndexWriterConfig#setWriteLockTimeout(long)} instead
*/
public void setWriteLockTimeout(long writeLockTimeout) {
ensureOpen();
this.writeLockTimeout = writeLockTimeout;
// Required so config.getWriteLockTimeout returns the right value. But this will
// go away together with the method in 4.0.
config.setWriteLockTimeout(writeLockTimeout);
}
/**
* Returns allowed timeout when acquiring the write lock.
* @see #setWriteLockTimeout
* @deprecated use {@link IndexWriterConfig#getWriteLockTimeout()}
*/
public long getWriteLockTimeout() {
ensureOpen();
@ -1540,18 +1630,20 @@ public class IndexWriter implements Closeable {
/**
* Sets the default (for any instance of IndexWriter) maximum time to wait for a write lock (in
* milliseconds).
* @deprecated use {@link IndexWriterConfig#setDefaultWriteLockTimeout(long)} instead
*/
public static void setDefaultWriteLockTimeout(long writeLockTimeout) {
IndexWriter.WRITE_LOCK_TIMEOUT = writeLockTimeout;
IndexWriterConfig.setDefaultWriteLockTimeout(writeLockTimeout);
}
/**
* Returns default write lock timeout for newly
* instantiated IndexWriters.
* @see #setDefaultWriteLockTimeout
* @deprecated use {@link IndexWriterConfig#getDefaultWriteLockTimeout()} instead
*/
public static long getDefaultWriteLockTimeout() {
return IndexWriter.WRITE_LOCK_TIMEOUT;
return IndexWriterConfig.getDefaultWriteLockTimeout();
}
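Put together, the static default and the per-writer override migrate as in this sketch; openWithTimeouts is a hypothetical helper and the millisecond values are arbitrary:

    static IndexWriter openWithTimeouts(Directory dir) throws IOException {
      IndexWriterConfig.setDefaultWriteLockTimeout(2000);   // new default, picked up by configs created afterwards
      IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT)
          .setWriteLockTimeout(5000);                       // per-writer override
      return new IndexWriter(dir, conf);
    }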
/**
@ -4785,9 +4877,13 @@ public class IndexWriter implements Closeable {
}
/**
* Specifies maximum field length (in number of tokens/terms) in {@link IndexWriter} constructors.
* {@link #setMaxFieldLength(int)} overrides the value set by
* the constructor.
* Specifies maximum field length (in number of tokens/terms) in
* {@link IndexWriter} constructors. {@link #setMaxFieldLength(int)} overrides
* the value set by the constructor.
*
* @deprecated use {@link IndexWriterConfig} and pass
* {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH} or your own
* value.
*/
public static final class MaxFieldLength {

View File

@ -48,7 +48,7 @@ final class SegmentMerger {
private Directory directory;
private String segment;
private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
private int termIndexInterval = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
private List<IndexReader> readers = new ArrayList<IndexReader>();
private FieldInfos fieldInfos;
@ -96,7 +96,7 @@ final class SegmentMerger {
}
};
}
termIndexInterval = writer.getTermIndexInterval();
termIndexInterval = writer.getConfig().getTermIndexInterval();
}
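The same getConfig() indirection works for any component that used to read settings off the writer, as SegmentMerger now does; a trivial sketch:

    static int currentTermIndexInterval(IndexWriter writer) {
      // writer.getTermIndexInterval() is replaced by a read through the live config:
      return writer.getConfig().getTermIndexInterval();
    }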
boolean hasProx() {

View File

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
@ -49,8 +50,9 @@ public class TestDemo extends LuceneTestCase {
Directory directory = new RAMDirectory();
// To store an index on disk, use this instead:
//Directory directory = FSDirectory.open("/tmp/testindex");
IndexWriter iwriter = new IndexWriter(directory, analyzer, true,
new IndexWriter.MaxFieldLength(25000));
IndexWriter iwriter = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxFieldLength(25000));
Document doc = new Document();
String text = "This is the text to be indexed.";
doc.add(new Field("fieldname", text, Field.Store.YES,

View File

@ -18,9 +18,9 @@ package org.apache.lucene;
*/
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.document.Document;
@ -86,15 +86,14 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
MyMergeScheduler ms = new MyMergeScheduler();
writer.setMergeScheduler(ms);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMergeScheduler(new MyMergeScheduler())
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH));
for(int i=0;i<20;i++)
writer.addDocument(doc);
ms.sync();
((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.close();
assertTrue(mergeThreadCreated);

View File

@ -70,14 +70,14 @@ public class TestSearch extends LuceneTestCase {
private void doTestSearch(PrintWriter out, boolean useCompoundFile)
throws Exception
{
throws Exception {
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(directory, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
String[] docs = {
"a b c d e",

View File

@ -78,10 +78,11 @@ public class TestSearchForDuplicates extends LuceneTestCase {
private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(directory, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(useCompoundFiles);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFiles);
lmp.setUseCompoundDocStore(useCompoundFiles);
final int MAX_DOCS = 225;

View File

@ -31,6 +31,7 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.TestIndexWriter;
@ -67,9 +68,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
// Force frequent flushes
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<7;i++) {
@ -83,7 +85,9 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
writer.close();
copyFiles(dir, cp);
writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
.setIndexDeletionPolicy(dp));
copyFiles(dir, cp);
for(int i=0;i<7;i++) {
writer.addDocument(doc);
@ -95,7 +99,9 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
writer.close();
copyFiles(dir, cp);
dp.release();
writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
.setIndexDeletionPolicy(dp));
writer.close();
try {
copyFiles(dir, cp);
@ -111,10 +117,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
final long stopTime = System.currentTimeMillis() + 1000;
SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
// Force frequent flushes
writer.setMaxBufferedDocs(2);
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
final Thread t = new Thread() {
@Override

View File

@ -27,6 +27,7 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermPositions;
import org.apache.lucene.store.Directory;
@ -37,7 +38,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
public void testCaching() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
TokenStream stream = new TokenStream() {
private int index = 0;

View File

@ -24,6 +24,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.queryParser.QueryParser;
@ -41,9 +42,9 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory,
new SimpleAnalyzer(TEST_VERSION_CURRENT),
true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
@ -70,7 +71,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
public void testMutipleDocument() throws Exception {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir,new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
writer.addDocument(doc);

View File

@ -23,6 +23,7 @@ import org.apache.lucene.analysis.PerFieldAnalyzerWrapper;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
@ -69,8 +70,7 @@ public class CollationTestBase extends LuceneTestCase {
String firstEnd, String secondBeg,
String secondEnd) throws Exception {
RAMDirectory ramDir = new RAMDirectory();
IndexWriter writer = new IndexWriter
(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628",
Field.Store.YES, Field.Index.ANALYZED));
@ -101,8 +101,7 @@ public class CollationTestBase extends LuceneTestCase {
String firstEnd, String secondBeg,
String secondEnd) throws Exception {
RAMDirectory ramDir = new RAMDirectory();
IndexWriter writer = new IndexWriter
(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
@ -125,13 +124,11 @@ public class CollationTestBase extends LuceneTestCase {
searcher.close();
}
public void testFarsiTermRangeQuery
(Analyzer analyzer, String firstBeg, String firstEnd,
String secondBeg, String secondEnd) throws Exception {
public void testFarsiTermRangeQuery(Analyzer analyzer, String firstBeg,
String firstEnd, String secondBeg, String secondEnd) throws Exception {
RAMDirectory farsiIndex = new RAMDirectory();
IndexWriter writer = new IndexWriter
(farsiIndex, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628",
Field.Store.YES, Field.Index.ANALYZED));
@ -178,8 +175,7 @@ public class CollationTestBase extends LuceneTestCase {
analyzer.addAnalyzer("France", franceAnalyzer);
analyzer.addAnalyzer("Sweden", swedenAnalyzer);
analyzer.addAnalyzer("Denmark", denmarkAnalyzer);
IndexWriter writer = new IndexWriter
(indexStore, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
// document data:
// the tracer field is used to determine which document was hit

View File

@ -2,9 +2,9 @@ package org.apache.lucene.document;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.MockRAMDirectory;
/**
@ -27,8 +27,7 @@ import org.apache.lucene.store.MockRAMDirectory;
/**
* Tests {@link Document} class.
*/
public class TestBinaryDocument extends LuceneTestCase
{
public class TestBinaryDocument extends LuceneTestCase {
String binaryValStored = "this text will be stored as a byte array in the index";
String binaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
@ -58,7 +57,7 @@ public class TestBinaryDocument extends LuceneTestCase
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(doc);
writer.close();
@ -83,9 +82,7 @@ public class TestBinaryDocument extends LuceneTestCase
dir.close();
}
public void testCompressionTools()
throws Exception
{
public void testCompressionTools() throws Exception {
Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.compress(binaryValCompressed.getBytes()));
Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.compressString(binaryValCompressed));
@ -96,7 +93,7 @@ public class TestBinaryDocument extends LuceneTestCase
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(doc);
writer.close();

View File

@ -2,6 +2,7 @@ package org.apache.lucene.document;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -151,10 +152,11 @@ public class TestDocument extends LuceneTestCase
*
* @throws Exception on error
*/
public void testGetValuesForIndexedDocument() throws Exception
{
public void testGetValuesForIndexedDocument() throws Exception {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
writer.addDocument(makeDocumentWithFields());
writer.close();
@ -225,7 +227,9 @@ public class TestDocument extends LuceneTestCase
doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
writer.addDocument(doc);
field.setValue("id2");
writer.addDocument(doc);

View File

@ -232,10 +232,9 @@ class DocHelper {
* @param doc
* @throws IOException
*/
public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException
{
IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
writer.setSimilarity(similarity);
public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setSimilarity(similarity));
//writer.setUseCompoundFile(false);
writer.addDocument(doc);
writer.commit();

View File

@ -20,9 +20,9 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
@ -39,27 +39,28 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, true);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.close();
writer = newWriter(aux, true);
writer.setUseCompoundFile(false); // use one without a compound file
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 40 documents in separate files
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
writer = newWriter(aux2, true);
writer = newWriter(aux2, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, false);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
assertEquals(190, writer.maxDoc());
@ -73,14 +74,14 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
// now add another set in.
Directory aux3 = new RAMDirectory();
writer = newWriter(aux3, true);
writer = newWriter(aux3, new IndexWriterConfig(TEST_VERSION_CURRENT));
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
// test doc count before segments are merged/index is optimized
writer = newWriter(dir, false);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux3 });
assertEquals(230, writer.maxDoc());
@ -94,7 +95,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "bbb"), 50);
// now optimize it.
writer = newWriter(dir, false);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@ -107,11 +108,11 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
// now add a single document
Directory aux4 = new RAMDirectory();
writer = newWriter(aux4, true);
writer = newWriter(aux4, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocs2(writer, 1);
writer.close();
writer = newWriter(dir, false);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux4 });
assertEquals(231, writer.maxDoc());
@ -129,7 +130,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.addIndexesNoOptimize(new Directory[] {aux});
// Adds 10 docs, then replaces them with another 10
@ -166,7 +167,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -205,7 +206,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -246,25 +247,25 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, true);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.close();
writer = newWriter(aux, true);
writer.setUseCompoundFile(false); // use one without a compound file
writer.setMaxBufferedDocs(1000);
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 140 documents in separate files
addDocs(writer, 40);
writer.close();
writer = newWriter(aux, true);
writer.setUseCompoundFile(false); // use one without a compound file
writer.setMaxBufferedDocs(1000);
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
addDocs(writer, 100);
writer.close();
writer = newWriter(dir, false);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
try {
// cannot add self
writer.addIndexesNoOptimize(new Directory[] { aux, dir });
@ -290,9 +291,10 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(4);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
addDocs(writer, 10);
writer.addIndexesNoOptimize(new Directory[] { aux });
@ -314,9 +316,8 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
writer.setMaxBufferedDocs(9);
writer.setMergeFactor(4);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
addDocs(writer, 2);
writer.addIndexesNoOptimize(new Directory[] { aux });
@ -338,9 +339,10 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, false);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(4);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
assertEquals(1060, writer.maxDoc());
@ -367,9 +369,10 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
assertEquals(10, reader.numDocs());
reader.close();
IndexWriter writer = newWriter(dir, false);
writer.setMaxBufferedDocs(4);
writer.setMergeFactor(4);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(4));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
assertEquals(1020, writer.maxDoc());
@ -390,9 +393,10 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
setUpDirs(dir, aux);
IndexWriter writer = newWriter(aux2, true);
writer.setMaxBufferedDocs(100);
writer.setMergeFactor(10);
IndexWriter writer = newWriter(aux2, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(
100));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.addIndexesNoOptimize(new Directory[] { aux });
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
@ -412,9 +416,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
assertEquals(22, reader.numDocs());
reader.close();
writer = newWriter(dir, false);
writer.setMaxBufferedDocs(6);
writer.setMergeFactor(4);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(6));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
assertEquals(1025, writer.maxDoc());
@ -425,9 +429,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
verifyNumDocs(dir, 1025);
}
private IndexWriter newWriter(Directory dir, boolean create)
private IndexWriter newWriter(Directory dir, IndexWriterConfig conf)
throws IOException {
final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
final IndexWriter writer = new IndexWriter(dir, conf);
writer.setMergePolicy(new LogDocMergePolicy(writer));
return writer;
}
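The old boolean create flag maps directly onto OpenMode, which is the pattern this test now follows throughout. A hypothetical shim making the mapping explicit (configFor is not part of the patch; it assumes an import of org.apache.lucene.index.IndexWriterConfig.OpenMode):

    static IndexWriterConfig configFor(boolean create) {
      return new IndexWriterConfig(Version.LUCENE_CURRENT)
          .setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND);
    }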
@ -471,26 +475,25 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
private void setUpDirs(Directory dir, Directory aux) throws IOException {
IndexWriter writer = null;
writer = newWriter(dir, true);
writer.setMaxBufferedDocs(1000);
writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
addDocs(writer, 1000);
assertEquals(1000, writer.maxDoc());
assertEquals(1, writer.getSegmentCount());
writer.close();
writer = newWriter(aux, true);
writer.setUseCompoundFile(false); // use one without a compound file
writer.setMaxBufferedDocs(100);
writer.setMergeFactor(10);
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
// add 30 documents in 3 segments
for (int i = 0; i < 3; i++) {
addDocs(writer, 10);
writer.close();
writer = newWriter(aux, false);
writer.setUseCompoundFile(false); // use one without a compound file
writer.setMaxBufferedDocs(100);
writer.setMergeFactor(10);
writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
}
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
@ -501,18 +504,19 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
public void testHangOnClose() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
writer.setMaxBufferedDocs(5);
writer.setUseCompoundFile(false);
writer.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(5));
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(100);
writer.setMergePolicy(lmp);
Document doc = new Document();
doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<60;i++)
writer.addDocument(doc);
writer.setMaxBufferedDocs(200);
Document doc2 = new Document();
doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
Field.Index.NO));
@ -527,13 +531,13 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
writer.close();
Directory dir2 = new MockRAMDirectory();
writer = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT).setMergeScheduler(new SerialMergeScheduler()));
lmp = new LogByteSizeMergePolicy(writer);
lmp.setMinMergeMB(0.0001);
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
lmp.setMergeFactor(4);
writer.setMergePolicy(lmp);
writer.setMergeFactor(4);
writer.setUseCompoundFile(false);
writer.setMergeScheduler(new SerialMergeScheduler());
writer.addIndexesNoOptimize(new Directory[] {dir});
writer.close();
dir.close();
@ -544,14 +548,16 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
// is respected when copying tail segments
public void testTargetCFS() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = newWriter(dir, true);
writer.setUseCompoundFile(false);
IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
addDocs(writer, 1);
writer.close();
Directory other = new RAMDirectory();
writer = newWriter(other, true);
writer.setUseCompoundFile(true);
writer = newWriter(other, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true);
writer.addIndexesNoOptimize(new Directory[] {dir});
assertTrue(writer.newestSegment().getUseCompoundFile());
writer.close();

View File

@ -19,20 +19,19 @@ package org.apache.lucene.index;
import org.apache.lucene.util.*;
import org.apache.lucene.store.*;
import org.apache.lucene.document.*;
import org.apache.lucene.analysis.*;
import java.util.Random;
import java.io.File;
import java.io.IOException;
public class TestAtomicUpdate extends LuceneTestCase {
private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
private Random RANDOM;
private static final class MockIndexWriter extends IndexWriter {
public class MockIndexWriter extends IndexWriter {
static Random RANDOM;
public MockIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl) throws IOException {
super(dir, a, create, mfl);
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
@Override
@ -126,9 +125,8 @@ public class TestAtomicUpdate extends LuceneTestCase {
TimedThread[] threads = new TimedThread[4];
IndexWriter writer = new MockIndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(7);
writer.setMergeFactor(3);
IndexWriter writer = new MockIndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(7));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
// Establish a base index of 100 docs:
for(int i=0;i<100;i++) {
@ -183,7 +181,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
FSDirectory.
*/
public void testAtomicUpdates() throws Exception {
RANDOM = newRandom();
MockIndexWriter.RANDOM = newRandom();
Directory directory;
// First in a RAM directory:

View File

@ -32,12 +32,12 @@ import java.util.ArrayList;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
@ -52,8 +52,7 @@ import org.apache.lucene.util._TestUtil;
against it, and add documents to it.
*/
public class TestBackwardsCompatibility extends LuceneTestCase
{
public class TestBackwardsCompatibility extends LuceneTestCase {
// Uncomment these cases & run them on an older Lucene
// version, to generate an index to test backwards
@ -215,7 +214,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
hasTested29++;
}
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.optimize();
w.close();
@ -355,7 +354,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
Directory dir = FSDirectory.open(new File(dirName));
// open writer
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
// add 10 docs
for(int i=0;i<10;i++) {
@ -399,7 +398,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
searcher.close();
// optimize
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@ -449,7 +448,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
searcher.close();
// optimize
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@ -471,9 +470,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase
dirName = fullDir(dirName);
Directory dir = FSDirectory.open(new File(dirName));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(doCFS);
writer.setMaxBufferedDocs(10);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
for(int i=0;i<35;i++) {
addDoc(writer, i);
@ -482,9 +481,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase
writer.close();
// open fresh writer so we get no prx file in the added segment
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(doCFS);
writer.setMaxBufferedDocs(10);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
addNoProxDoc(writer);
writer.close();
@ -509,8 +508,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase
try {
Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setRAMBufferSizeMB(16.0);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for(int i=0;i<35;i++) {
addDoc(writer, i);
}

View File

@ -25,7 +25,6 @@ import java.util.ArrayList;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.util.Constants;
@ -34,9 +33,7 @@ public class TestCheckIndex extends LuceneTestCase {
public void testDeletedDocs() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<19;i++) {

View File

@ -17,20 +17,17 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
public class TestConcurrentMergeScheduler extends LuceneTestCase {
private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
boolean doFail;
boolean hitExc;
@ -68,10 +65,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure);
IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.setMergeScheduler(cms);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
Document doc = new Document();
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
@ -115,9 +109,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.setMergeScheduler(cms);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
LogDocMergePolicy mp = new LogDocMergePolicy(writer);
writer.setMergePolicy(mp);
@ -157,12 +149,10 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
for(int iter=0;iter<7;iter++) {
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.setMergeScheduler(cms);
writer.setMaxBufferedDocs(2);
for(int j=0;j<21;j++) {
Document doc = new Document();
@ -174,7 +164,9 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(2));
}
writer.close();
@ -189,13 +181,10 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
for(int iter=0;iter<10;iter++) {
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
writer.setMergeScheduler(cms);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(100);
for(int j=0;j<201;j++) {
idField.setValue(Integer.toString(iter*201+j));
@ -210,7 +199,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Force a bunch of merge threads to kick off so we
// stress out aborting them on close:
writer.setMergeFactor(3);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
writer.addDocument(doc);
writer.commit();
@ -221,7 +210,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
reader.close();
// Reopen
writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
}
writer.close();

View File

@ -20,7 +20,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.document.Document;
@ -35,10 +34,8 @@ public class TestCrash extends LuceneTestCase {
private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
//writer.setMaxBufferedDocs(2);
writer.setMaxBufferedDocs(10);
((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
@ -51,7 +48,7 @@ public class TestCrash extends LuceneTestCase {
private void crash(final IndexWriter writer) throws IOException {
final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getMergeScheduler();
ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
dir.crash();
cms.sync();
dir.clearCrash();

View File

@ -23,9 +23,9 @@ import java.util.List;
import java.util.Set;
import java.util.Collection;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
@ -40,8 +40,8 @@ import org.apache.lucene.util.LuceneTestCase;
against it, and add documents to it.
*/
public class TestDeletionPolicy extends LuceneTestCase
{
public class TestDeletionPolicy extends LuceneTestCase {
private void verifyCommitOrder(List<? extends IndexCommit> commits) throws IOException {
final IndexCommit firstCommit = commits.get(0);
long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName());
@ -201,8 +201,10 @@ public class TestDeletionPolicy extends LuceneTestCase
Directory dir = new RAMDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.close();
long lastDeleteTime = 0;
@ -210,8 +212,11 @@ public class TestDeletionPolicy extends LuceneTestCase
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = System.currentTimeMillis();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int j=0;j<17;j++) {
addDoc(writer);
}
@ -271,17 +276,22 @@ public class TestDeletionPolicy extends LuceneTestCase
Directory dir = new RAMDirectory();
policy.dir = dir;
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(10);
writer.setUseCompoundFile(useCompoundFile);
writer.setMergeScheduler(new SerialMergeScheduler());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10).setMergeScheduler(new SerialMergeScheduler()));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.optimize();
writer.close();
@ -318,7 +328,9 @@ public class TestDeletionPolicy extends LuceneTestCase
// Open & close a writer and assert that it
// actually removed something:
int preCount = dir.listAll().length;
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy));
writer.close();
int postCount = dir.listAll().length;
assertTrue(postCount < preCount);
@ -340,8 +352,9 @@ public class TestDeletionPolicy extends LuceneTestCase
Directory dir = new MockRAMDirectory();
policy.dir = dir;
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(2));
for(int i=0;i<10;i++) {
addDoc(writer);
if ((1+i)%2 == 0)
@ -359,7 +372,7 @@ public class TestDeletionPolicy extends LuceneTestCase
assertTrue(lastCommit != null);
// Now add 1 doc and optimize
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.optimize();
@ -368,7 +381,8 @@ public class TestDeletionPolicy extends LuceneTestCase
assertEquals(7, IndexReader.listCommits(dir).size());
// Now open writer on the commit just before optimize:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Should undo our rollback:
@ -380,7 +394,8 @@ public class TestDeletionPolicy extends LuceneTestCase
assertEquals(11, r.numDocs());
r.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
writer.close();
@ -396,7 +411,7 @@ public class TestDeletionPolicy extends LuceneTestCase
r.close();
// Reoptimize
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
writer.optimize();
writer.close();
@ -407,7 +422,7 @@ public class TestDeletionPolicy extends LuceneTestCase
// Now open writer on the commit just before optimize,
// but this time keeping only the last commit:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Reader still sees optimized index, because writer
@ -443,16 +458,22 @@ public class TestDeletionPolicy extends LuceneTestCase
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(10);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.optimize();
writer.close();
@ -486,9 +507,12 @@ public class TestDeletionPolicy extends LuceneTestCase
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
for(int j=0;j<N+1;j++) {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(10);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int i=0;i<17;i++) {
addDoc(writer);
}
@@ -541,15 +565,23 @@ public class TestDeletionPolicy extends LuceneTestCase
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.close();
Term searchTerm = new Term("content", "aaa");
Query query = new TermQuery(searchTerm);
for(int i=0;i<N+1;i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int j=0;j<17;j++) {
addDoc(writer);
}
@@ -565,8 +597,11 @@ public class TestDeletionPolicy extends LuceneTestCase
reader.close();
searcher.close();
}
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.optimize();
// this is a commit
writer.close();
@@ -636,18 +671,24 @@ public class TestDeletionPolicy extends LuceneTestCase
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(10);
writer.setUseCompoundFile(useCompoundFile);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
writer.close();
Term searchTerm = new Term("content", "aaa");
Query query = new TermQuery(searchTerm);
for(int i=0;i<N+1;i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(10);
writer.setUseCompoundFile(useCompoundFile);
writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
for(int j=0;j<17;j++) {
addDoc(writer);
}
@@ -663,7 +704,9 @@ public class TestDeletionPolicy extends LuceneTestCase
reader.close();
searcher.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy));
// This will not commit: there are no changes
// pending because we opened for "create":
writer.close();
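
The TestDeletionPolicy hunks above all apply one mechanical rewrite: the deprecated multi-argument IndexWriter constructors collapse into the single form taking a Directory and an IndexWriterConfig. A minimal sketch of that rewrite, with Version.LUCENE_CURRENT and a RAMDirectory as illustrative stand-ins:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class ConfigCutoverSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        // Old: new IndexWriter(dir, analyzer, true, policy, MaxFieldLength.UNLIMITED)
        // New: analyzer, create flag and deletion policy all hang off the config.
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
            .setOpenMode(OpenMode.CREATE)
            .setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()));
        writer.close();
      }
    }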


@@ -22,6 +22,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
@@ -187,7 +188,10 @@ public class TestDirectoryReader extends LuceneTestCase {
}
private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter iw = new IndexWriter(ramDir1, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND));
Document doc = new Document();
doc.add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
iw.addDocument(doc);
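
The old boolean create parameter maps directly onto the new OpenMode enum, which is what the ternary above spells out. A tiny sketch of the mapping (the helper name is illustrative, not from the patch):

    import org.apache.lucene.index.IndexWriterConfig.OpenMode;

    final class OpenModeMapping {
      // true used to mean "wipe and recreate"; false meant "append to existing".
      static OpenMode fromCreateFlag(boolean create) {
        return create ? OpenMode.CREATE : OpenMode.APPEND;
      }
    }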


@@ -29,9 +29,9 @@ import java.util.List;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -109,7 +109,7 @@ public class TestDoc extends LuceneTestCase {
PrintWriter out = new PrintWriter(sw, true);
Directory directory = FSDirectory.open(indexDir);
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
SegmentInfo si1 = indexDoc(writer, "test.txt");
printSegment(out, si1);
@@ -137,7 +137,8 @@ public class TestDoc extends LuceneTestCase {
out = new PrintWriter(sw, true);
directory = FSDirectory.open(indexDir);
writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
si1 = indexDoc(writer, "test.txt");
printSegment(out, si1);


@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
@@ -61,8 +60,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testAddDocument() throws Exception {
Document testDoc = new Document();
DocHelper.setupDoc(testDoc);
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(testDoc);
writer.commit();
SegmentInfo info = writer.newestSegment();
@@ -119,7 +117,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
@@ -182,7 +180,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document doc = new Document();
doc.add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
@@ -207,7 +205,9 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testPreAnalyzedField() throws IOException {
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("preanalyzed", new TokenStream() {
@@ -266,7 +266,9 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
writer.addDocument(doc);
writer.close();
@@ -299,7 +301,9 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(f);
doc.add(new Field("f2", "v2", Store.YES, Index.NO));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
TEST_VERSION_CURRENT)));
writer.addDocument(doc);
writer.optimize(); // be sure to have a single segment
writer.close();
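
With the per-writer setters deprecated, a test that needs to read a setting back goes through the writer's config. A short sketch, assuming IndexWriterConfig exposes a matching getter for each setter used in this patch:

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class ReadBackSettings {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(),
            new IndexWriterConfig(Version.LUCENE_CURRENT).setMaxBufferedDocs(10));
        // Query the live config rather than a deprecated writer getter.
        System.out.println(writer.getConfig().getMaxBufferedDocs()); // 10
        writer.close();
      }
    }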


@@ -17,22 +17,31 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.*;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.LoadFirstFieldSelector;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class TestFieldsReader extends LuceneTestCase {
private RAMDirectory dir = new RAMDirectory();
private Document testDoc = new Document();
@@ -50,8 +59,9 @@ public class TestFieldsReader extends LuceneTestCase {
fieldInfos = new FieldInfos();
DocHelper.setupDoc(testDoc);
fieldInfos.add(testDoc);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
writer.addDocument(testDoc);
writer.close();
}
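
The compound-file switches are no longer writer-level settings: they belong to the merge policy, and the doc store gets a flag of its own. A sketch of the helper shape these tests repeat; the cast assumes the default merge policy, a LogMergePolicy:

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;

    final class CompoundFileSwitch {
      static void setCompound(IndexWriter writer, boolean useCompound) {
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        lmp.setUseCompoundFile(useCompound);     // was writer.setUseCompoundFile(...)
        lmp.setUseCompoundDocStore(useCompound); // doc stores now have their own flag
      }
    }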
@@ -207,8 +217,8 @@ public class TestFieldsReader extends LuceneTestCase {
FSDirectory tmpDir = FSDirectory.open(file);
assertTrue(tmpDir != null);
IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
writer.addDocument(testDoc);
writer.close();
@@ -387,7 +397,8 @@ public class TestFieldsReader extends LuceneTestCase {
try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
writer.optimize();


@@ -24,7 +24,6 @@ import junit.textui.TestRunner;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -97,8 +96,7 @@ public class TestFilterIndexReader extends LuceneTestCase {
*/
public void testFilterIndexReader() throws Exception {
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d1 = new Document();
d1.add(new Field("default","one two", Field.Store.YES, Field.Index.ANALYZED));


@@ -18,13 +18,14 @@ package org.apache.lucene.index;
*/
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import java.io.*;
import java.util.*;
@@ -33,19 +34,19 @@ import java.util.*;
against it, and add documents to it.
*/
public class TestIndexFileDeleter extends LuceneTestCase
{
public class TestIndexFileDeleter extends LuceneTestCase {
public void testDeleteLeftoverFiles() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
int i;
for(i=0;i<35;i++) {
addDoc(writer, i);
}
writer.setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
for(;i<45;i++) {
addDoc(writer, i);
}
@@ -144,7 +145,7 @@ public class TestIndexFileDeleter extends LuceneTestCase
// Open & close a writer: it should delete the above 4
// files and nothing more:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.close();
String[] files2 = dir.listAll();
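
Because the flag now lives on the live MergePolicy rather than being captured at construction time, TestIndexFileDeleter can flip it between batches while the writer stays open, as the hunk above shows. The shape of that mid-stream toggle, with addDoc standing in for the test's own helper:

    // Sketch only: writer is already open; later segments skip the compound format.
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
    for (int i = 35; i < 45; i++) {
      addDoc(writer, i);
    }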


@@ -34,14 +34,13 @@ import java.util.SortedSet;
import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
@@ -78,8 +77,7 @@ public class TestIndexReader extends LuceneTestCase
commitUserData.put("foo", "fighters");
// set up writer
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.close();
@@ -100,8 +98,8 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.close();
@@ -111,7 +109,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r3.close();
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -122,22 +120,21 @@ public class TestIndexReader extends LuceneTestCase
d.close();
}
public void testIsCurrent() throws Exception
{
public void testIsCurrent() throws Exception {
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
writer.close();
// set up reader:
IndexReader reader = IndexReader.open(d, false);
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
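
testIsCurrent leans on OpenMode.CREATE truncating an existing index in place, exactly what the old create=true argument did. A sketch of that idiom:

    import java.io.IOException;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.Version;

    final class RecreateIndex {
      // Discards any existing segments, so readers opened against the old
      // generation report isCurrent() == false afterwards.
      static IndexWriter recreate(Directory dir) throws IOException {
        return new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setOpenMode(OpenMode.CREATE));
      }
    }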
@@ -149,11 +146,10 @@ public class TestIndexReader extends LuceneTestCase
* Tests the IndexReader.getFieldNames implementation
* @throws Exception on error
*/
public void testGetFieldNames() throws Exception
{
public void testGetFieldNames() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
writer.close();
// set up reader
@@ -165,20 +161,18 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(fieldNames.contains("unstored"));
reader.close();
// add more documents
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
// want to get some more segments here
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
for (int i = 0; i < 5*mergeFactor; i++) {
addDocumentWithFields(writer);
}
// new fields are in some different segments (we hope)
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
for (int i = 0; i < 5*mergeFactor; i++) {
addDocumentWithDifferentFields(writer);
}
// new termvector fields
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
for (int i = 0; i < 5*mergeFactor; i++) {
addDocumentWithTermVectorFields(writer);
}
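
getMergeFactor() moved off the writer as well; note how the patch hoists the policy lookup out of the loop so the cast happens once. The resulting idiom, with addDocumentWithFields standing in for the test's helper:

    // Sketch only: writer is an open IndexWriter with the default LogMergePolicy.
    int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
    for (int i = 0; i < 5 * mergeFactor; i++) {
      addDocumentWithFields(writer); // enough docs to spread over several segments
    }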
@@ -245,10 +239,11 @@ public class TestIndexReader extends LuceneTestCase
public void testTermVectors() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
// want to get some more segments here
// new termvector fields
for (int i = 0; i < 5 * writer.getMergeFactor(); i++) {
int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
for (int i = 0; i < 5 * mergeFactor; i++) {
Document doc = new Document();
doc.add(new Field("tvnot","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
doc.add(new Field("termvector","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
@@ -271,10 +266,6 @@ public class TestIndexReader extends LuceneTestCase
assertTrue("entry is null and it shouldn't be", entry != null);
System.out.println("Entry: " + entry);
}
}
@@ -302,10 +293,7 @@ public class TestIndexReader extends LuceneTestCase
}
public void testBasicDelete() throws IOException
{
public void testBasicDelete() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = null;
@@ -313,9 +301,8 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 100 documents with term : aaa
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm.text());
}
writer.close();
@@ -350,12 +337,11 @@ public class TestIndexReader extends LuceneTestCase
dir.close();
}
public void testBinaryFields() throws IOException
{
public void testBinaryFields() throws IOException {
Directory dir = new RAMDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));
@@ -364,7 +350,7 @@ public class TestIndexReader extends LuceneTestCase
addDocumentWithTermVectorFields(writer);
}
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
Document doc = new Document();
doc.add(new Field("bin1", bin));
doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
@@ -401,7 +387,7 @@ public class TestIndexReader extends LuceneTestCase
// force optimize
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
reader = IndexReader.open(dir, false);
@@ -421,8 +407,7 @@ public class TestIndexReader extends LuceneTestCase
// Make sure attempts to make changes after reader is
// closed throws IOException:
public void testChangesAfterClose() throws IOException
{
public void testChangesAfterClose() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = null;
@@ -430,9 +415,8 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 11 documents with term : aaa
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 11; i++) {
addDoc(writer, searchTerm.text());
}
writer.close();
@@ -466,8 +450,7 @@ public class TestIndexReader extends LuceneTestCase
}
// Make sure we get lock obtain failed exception with 2 writers:
public void testLockObtainFailed() throws IOException
{
public void testLockObtainFailed() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = null;
@@ -475,9 +458,8 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 11 documents with term : aaa
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 11; i++) {
addDoc(writer, searchTerm.text());
}
@@ -521,7 +503,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");
// add 1 documents with term : aaa
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDoc(writer, searchTerm.text());
writer.close();
@@ -558,16 +540,16 @@ public class TestIndexReader extends LuceneTestCase
// Make sure you can set norms & commit, and there are
// no extra norms files left:
public void testWritingNormsNoReader() throws IOException
{
public void testWritingNormsNoReader() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = null;
IndexReader reader = null;
Term searchTerm = new Term("content", "aaa");
// add 1 documents with term : aaa
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
addDoc(writer, searchTerm.text());
writer.close();
@@ -611,8 +593,7 @@ public class TestIndexReader extends LuceneTestCase
deleteReaderWriterConflict(true);
}
private void deleteReaderWriterConflict(boolean optimize) throws IOException
{
private void deleteReaderWriterConflict(boolean optimize) throws IOException {
//Directory dir = new RAMDirectory();
Directory dir = getDirectory();
@@ -620,9 +601,8 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm2 = new Term("content", "bbb");
// add 100 documents with term : aaa
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm.text());
}
writer.close();
@@ -636,9 +616,8 @@ public class TestIndexReader extends LuceneTestCase
assertTermDocsCount("first reader", reader, searchTerm2, 0);
// add 100 documents with term : bbb
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm2.text());
}
@@ -698,12 +677,11 @@ public class TestIndexReader extends LuceneTestCase
return FSDirectory.open(new File(System.getProperty("tempDir"), "testIndex"));
}
public void testFilesOpenClose() throws IOException
{
public void testFilesOpenClose() throws IOException {
// Create initial data set
File dirFile = new File(System.getProperty("tempDir"), "testIndex");
Directory dir = getDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDoc(writer, "test");
writer.close();
dir.close();
@@ -713,7 +691,7 @@ public class TestIndexReader extends LuceneTestCase
dir = getDirectory();
// Now create the data set again, just as before
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
addDoc(writer, "test");
writer.close();
dir.close();
@@ -739,7 +717,7 @@ public class TestIndexReader extends LuceneTestCase
else
dir = getDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@@ -756,7 +734,7 @@ public class TestIndexReader extends LuceneTestCase
// incremented:
Thread.sleep(1000);
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@@ -773,7 +751,7 @@ public class TestIndexReader extends LuceneTestCase
public void testVersion() throws IOException {
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
@@ -784,7 +762,7 @@ public class TestIndexReader extends LuceneTestCase
reader.close();
// modify index and check version has been
// incremented:
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
@@ -795,10 +773,10 @@ public class TestIndexReader extends LuceneTestCase
public void testLock() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
IndexReader reader = IndexReader.open(dir, false);
try {
reader.deleteDocument(0);
@@ -815,7 +793,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -832,7 +810,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAllAfterClose() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -849,7 +827,7 @@ public class TestIndexReader extends LuceneTestCase
public void testUndeleteAllAfterCloseThenReopen() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();
@@ -887,7 +865,7 @@ public class TestIndexReader extends LuceneTestCase
// First build up a starting index:
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for(int i=0;i<157;i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -1077,7 +1055,7 @@ public class TestIndexReader extends LuceneTestCase
public void testDocsOutOfOrderJIRA140() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for(int i=0;i<11;i++) {
addDoc(writer, "aaa");
}
@@ -1095,7 +1073,7 @@ public class TestIndexReader extends LuceneTestCase
}
reader.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
// We must add more docs to get a new segment written
for(int i=0;i<11;i++) {
@@ -1117,7 +1095,7 @@ public class TestIndexReader extends LuceneTestCase
public void testExceptionReleaseWriteLockJIRA768() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDoc(writer, "aaa");
writer.close();
@@ -1182,8 +1160,7 @@ public class TestIndexReader extends LuceneTestCase
dir.close();
}
private void deleteReaderReaderConflict(boolean optimize) throws IOException
{
private void deleteReaderReaderConflict(boolean optimize) throws IOException {
Directory dir = getDirectory();
Term searchTerm1 = new Term("content", "aaa");
@@ -1193,9 +1170,8 @@ public class TestIndexReader extends LuceneTestCase
// add 100 documents with term : aaa
// add 100 documents with term : bbb
// add 100 documents with term : ccc
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
addDoc(writer, searchTerm1.text());
addDoc(writer, searchTerm2.text());
addDoc(writer, searchTerm3.text());
@@ -1417,8 +1393,7 @@ public class TestIndexReader extends LuceneTestCase
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.close();
@@ -1433,8 +1408,8 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.close();
@@ -1444,7 +1419,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r2.close();
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -1458,7 +1433,7 @@ public class TestIndexReader extends LuceneTestCase
public void testReadOnly() throws Throwable {
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
addDocumentWithFields(writer);
writer.commit();
addDocumentWithFields(writer);
@@ -1472,7 +1447,7 @@ public class TestIndexReader extends LuceneTestCase
// expected
}
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.close();
@@ -1489,7 +1464,7 @@ public class TestIndexReader extends LuceneTestCase
// expected
}
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -1507,7 +1482,7 @@ public class TestIndexReader extends LuceneTestCase
}
// Make sure write lock isn't held
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer.close();
r3.close();
@@ -1517,8 +1492,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1474
public void testIndexReader() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
writer.addDocument(createDocument("c"));
@@ -1535,8 +1509,7 @@ public class TestIndexReader extends LuceneTestCase
public void testIndexReaderUnDeleteAll() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
dir.setPreventDoubleWrite(false);
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
writer.addDocument(createDocument("c"));
@@ -1577,10 +1550,7 @@ public class TestIndexReader extends LuceneTestCase
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
@@ -1603,7 +1573,7 @@ public class TestIndexReader extends LuceneTestCase
// reuse the doc values arrays in FieldCache
public void testFieldCacheReuseAfterClone() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1634,7 +1604,7 @@ public class TestIndexReader extends LuceneTestCase
// FieldCache
public void testFieldCacheReuseAfterReopen() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1666,7 +1636,7 @@ public class TestIndexReader extends LuceneTestCase
// reopen switches readOnly
public void testReopenChangeReadonly() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);
@@ -1707,7 +1677,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1740,7 +1710,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1609: don't load terms index
public void testNoTermsIndex() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));
@@ -1758,7 +1728,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());
assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.addDocument(doc);
writer.close();
@@ -1777,7 +1747,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
writer.addDocument(doc);
IndexReader r = IndexReader.open(dir, true);


@@ -19,7 +19,6 @@ package org.apache.lucene.index;
import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;
@@ -197,7 +196,7 @@ public class TestIndexReaderClone extends LuceneTestCase {
TestIndexReaderReopen.createIndex(dir1, true);
IndexReader reader1 = IndexReader.open(dir1, false);
IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.optimize();
w.close();
IndexReader reader2 = reader1.clone(true);
@@ -484,8 +483,9 @@ public class TestIndexReaderClone extends LuceneTestCase {
public void testCloseStoredFields() throws Exception {
final Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
w.setUseCompoundFile(false);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) w.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) w.getMergePolicy()).setUseCompoundDocStore(false);
Document doc = new Document();
doc.add(new Field("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));
w.addDocument(doc);


@@ -28,6 +28,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
@@ -118,10 +119,10 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
Directory dir3 = FSDirectory.open(indexDir3);
createIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, anlzr, false,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.addIndexesNoOptimize(new Directory[] { dir1, dir2 });
iw.optimize();
iw.close();
@@ -137,9 +138,9 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
doTestNorms(dir3);
// now with optimize
iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.optimize();
iw.close();
verifyIndex(dir3);
@@ -238,12 +239,13 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
}
private void createIndex(Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, anlzr, true,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(true);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
lmp.setUseCompoundDocStore(true);
iw.close();
}
@@ -290,12 +292,13 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
private void addDocs(Directory dir, int ndocs, boolean compound)
throws IOException {
IndexWriter iw = new IndexWriter(dir, anlzr, false,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(compound);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
lmp.setUseCompoundDocStore(compound);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
}
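
TestIndexReaderCloneNorms shows the split most clearly: per-writer settings (analyzer, open mode, RAM buffering, similarity) ride on the config, while merge behaviour (merge factor, compound files) is set on the policy. A condensed sketch of the createIndex/addDocs pattern; anlzr and similarityOne mirror the test's fields and are assumptions here:

    import java.io.IOException;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.index.LogMergePolicy;
    import org.apache.lucene.search.Similarity;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.Version;

    final class NormsWriterFactory {
      static IndexWriter open(Directory dir, Analyzer anlzr, Similarity similarityOne)
          throws IOException {
        IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setAnalyzer(anlzr).setOpenMode(OpenMode.CREATE)
            .setMaxBufferedDocs(5).setSimilarity(similarityOne));
        LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
        lmp.setMergeFactor(3);        // was iw.setMergeFactor(3)
        lmp.setUseCompoundFile(true);
        lmp.setUseCompoundDocStore(true);
        return iw;
      }
    }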


@@ -31,13 +31,11 @@ import java.util.HashMap;
import java.util.Set;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
@@ -171,8 +169,9 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
private void doTestReopenWithCommit (Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, new KeywordAnalyzer(), true, MaxFieldLength.LIMITED);
iwriter.setMergeScheduler(new SerialMergeScheduler());
IndexWriter iwriter = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
new KeywordAnalyzer()).setMergeScheduler(new SerialMergeScheduler()));
IndexReader reader = IndexReader.open(dir, false);
try {
int M = 3;
@@ -702,7 +701,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final Directory dir = new MockRAMDirectory();
final int n = 30;
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}
@@ -721,7 +720,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
modifier.deleteDocument(i % modifier.maxDoc());
modifier.close();
} else {
IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
modifier.addDocument(createDocument(n + i, 6));
modifier.close();
}
@@ -946,7 +945,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public static void createIndex(Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.setMergePolicy(new LogDocMergePolicy(w));
@@ -991,7 +990,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
static void modifyIndex(int i, Directory dir) throws IOException {
switch (i) {
case 0: {
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.close();
@@ -1006,13 +1005,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 2: {
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.optimize();
w.close();
break;
}
case 3: {
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.addDocument(createDocument(101, 4));
w.optimize();
w.addDocument(createDocument(102, 4));
@@ -1028,7 +1027,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 5: {
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.addDocument(createDocument(101, 4));
w.close();
break;
@@ -1192,7 +1191,8 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testReopenOnCommit() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setIndexDeletionPolicy(new KeepAllCommits()));
for(int i=0;i<4;i++) {
Document doc = new Document();
doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));
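
Merge scheduling and commit retention move the same way, chained onto one config. The shape, as a fragment; KeepAllCommits is the test's own IndexDeletionPolicy and dir an already-open Directory:

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setIndexDeletionPolicy(new KeepAllCommits())
        .setMergeScheduler(new SerialMergeScheduler()));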

[diff for one file suppressed: too large to display]

@@ -20,7 +20,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.Arrays;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;
@@ -41,10 +40,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setUseCompoundFile(true);
modifier.setMaxBufferedDeleteTerms(1);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(1));
for (int i = 0; i < keywords.length; i++) {
Document doc = new Document();
@@ -78,10 +75,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testNonRAMDelete() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(2);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -113,9 +109,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testMaxBufferedDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDeleteTerms(1);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(1));
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
@@ -128,10 +123,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testRAMDeletes() throws IOException {
for(int t=0;t<2;t++) {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(4);
modifier.setMaxBufferedDeleteTerms(4);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(4)
.setMaxBufferedDeleteTerms(4));
int id = 0;
int value = 100;
@@ -170,10 +164,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test when delete terms apply to both disk and ram segments
public void testBothDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(100);
modifier.setMaxBufferedDeleteTerms(100);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(100)
.setMaxBufferedDeleteTerms(100));
int id = 0;
int value = 100;
@@ -203,10 +196,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test that batched delete terms are flushed together
public void testBatchDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(2);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -247,10 +239,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll()
public void testDeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(2);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@ -294,10 +285,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test rollback of deleteAll()
public void testDeleteAllRollback() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(2);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@ -332,10 +322,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll() w/ near real-time reader
public void testDeleteAllNRT() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(2);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@ -424,8 +413,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// First build up a starting index:
MockRAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 157; i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@ -447,11 +435,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {
while (!done) {
MockRAMDirectory dir = new MockRAMDirectory(startDir);
dir.setPreventDoubleWrite(false);
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setMaxBufferedDocs(1000); // use flush or close
modifier.setMaxBufferedDeleteTerms(1000); // use flush or close
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDocs(1000)
.setMaxBufferedDeleteTerms(1000));
// For each disk size, first try to commit against
// dir that will hit random IOExceptions & disk
@ -653,10 +639,11 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
modifier.setUseCompoundFile(true);
modifier.setMaxBufferedDeleteTerms(2);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(2));
LogMergePolicy lmp = (LogMergePolicy) modifier.getMergePolicy();
lmp.setUseCompoundFile(true);
lmp.setUseCompoundDocStore(true);
dir.failOn(failure.reset());
@ -762,8 +749,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter modifier = new IndexWriter(dir,
new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
dir.failOn(failure.reset());
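
The hunks above repeat one mechanical rewrite: the deprecated multi-argument IndexWriter constructor becomes the single (Directory, IndexWriterConfig) form, and the old writer-level setters are chained onto the config, whose setters return the config itself. A minimal sketch of the pattern as this commit defines it; the class name, method name and Version.LUCENE_CURRENT are illustrative placeholders, not part of the commit:

import java.io.IOException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

final class ConfigCtorSketch {
  static IndexWriter open(Directory dir) throws IOException {
    // Before: new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED)
    //         followed by writer.setMaxBufferedDocs(2) and writer.setMaxBufferedDeleteTerms(2).
    // After: the same knobs travel on the config in a single chained expression.
    return new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
  }
}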


@ -24,8 +24,6 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -111,11 +109,11 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();
public class MockIndexWriter extends IndexWriter {
private class MockIndexWriter extends IndexWriter {
Random r = new java.util.Random(17);
public MockIndexWriter(Directory dir, Analyzer a, boolean create, MaxFieldLength mfl) throws IOException {
super(dir, a, create, mfl);
public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
super(dir, conf);
}
@Override
@ -134,10 +132,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testRandomExceptions() throws Throwable {
MockRAMDirectory dir = new MockRAMDirectory();
MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setRAMBufferSizeMB(0.1));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.setRAMBufferSizeMB(0.1);
if (DEBUG)
writer.setInfoStream(System.out);
@ -172,10 +169,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testRandomExceptionsThreads() throws Throwable {
MockRAMDirectory dir = new MockRAMDirectory();
MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setRAMBufferSizeMB(0.2));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.setRAMBufferSizeMB(0.2);
if (DEBUG)
writer.setInfoStream(System.out);
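
Two further pieces of the cutover show up in this file: an IndexWriter subclass now forwards a config to super() instead of the old (analyzer, create, MaxFieldLength) arguments, and the merge scheduler is reached through writer.getConfig() rather than a writer-level getter. A hedged sketch; SketchIndexWriter and open are invented names, Version.LUCENE_CURRENT stands in for the test's TEST_VERSION_CURRENT, and the cast assumes the default scheduler is a ConcurrentMergeScheduler, as the test's own cast implies:

import java.io.IOException;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

// Subclasses take the same (Directory, IndexWriterConfig) pair as IndexWriter itself.
class SketchIndexWriter extends IndexWriter {
  SketchIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
    super(dir, conf);
  }
}

final class SchedulerAccessSketch {
  static SketchIndexWriter open(Directory dir) throws IOException {
    SketchIndexWriter w = new SketchIndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT).setRAMBufferSizeMB(0.1));
    // Was: ((ConcurrentMergeScheduler) w.getMergeScheduler()).setSuppressExceptions();
    ((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).setSuppressExceptions();
    return w;
  }
}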


@ -22,6 +22,7 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
/**
@ -74,10 +75,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
public void testIndexWriterLockRelease() throws IOException {
FSDirectory dir = FSDirectory.open(this.__test_dir);
try {
new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e) {
try {
new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e1) {
}
} finally {
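
The boolean create flag of the old constructors is replaced here by an explicit OpenMode on the config: create=false becomes OpenMode.APPEND, so opening a missing index still fails with FileNotFoundException, which is exactly what this test relies on. A small sketch under the same assumptions as above (placeholder names, Version.LUCENE_CURRENT):

import java.io.IOException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

final class OpenModeSketch {
  // create=true maps to OpenMode.CREATE; create=false maps to OpenMode.APPEND.
  static IndexWriter openExisting(Directory dir) throws IOException {
    return new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_CURRENT).setOpenMode(OpenMode.APPEND));
  }
}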


@ -19,9 +19,9 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil;
@ -34,9 +34,8 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testNormalCase() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.setMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 100; i++) {
@ -51,9 +50,8 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testNoOverMerge() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.setMergePolicy(new LogDocMergePolicy(writer));
boolean noOverMerge = false;
@ -73,9 +71,8 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testForceFlush() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
LogDocMergePolicy mp = new LogDocMergePolicy(writer);
mp.setMinMergeDocs(100);
writer.setMergePolicy(mp);
@ -84,11 +81,11 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
addDoc(writer);
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer.setMergePolicy(mp);
mp.setMinMergeDocs(100);
writer.setMergeFactor(10);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
checkInvariants(writer);
}
@ -99,9 +96,8 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testMergeFactorChange() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
writer.setMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 250; i++) {
@ -109,7 +105,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
checkInvariants(writer);
}
writer.setMergeFactor(5);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@ -125,9 +121,8 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testMaxBufferedDocsChange() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(101);
writer.setMergeFactor(101);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(101));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
writer.setMergePolicy(new LogDocMergePolicy(writer));
// leftmost* segment has 1 doc
@ -139,14 +134,17 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
}
writer.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(101);
writer.setMergeFactor(101);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(101));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
writer.setMergePolicy(new LogDocMergePolicy(writer));
}
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(10);
writer.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer.setMergePolicy(new LogDocMergePolicy(writer));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@ -159,7 +157,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
addDoc(writer);
}
writer.commit();
((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.commit();
checkInvariants(writer);
@ -170,10 +168,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testMergeDocCount0() throws IOException {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
writer.setMergePolicy(new LogDocMergePolicy(writer));
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(100);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
for (int i = 0; i < 250; i++) {
addDoc(writer);
@ -185,17 +182,17 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
reader.deleteDocuments(new Term("content", "aaa"));
reader.close();
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer.setMergePolicy(new LogDocMergePolicy(writer));
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(5);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
// merge factor is changed, so check invariants after all adds
for (int i = 0; i < 10; i++) {
addDoc(writer);
}
writer.commit();
((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.commit();
checkInvariants(writer);
assertEquals(10, writer.maxDoc());
@ -211,9 +208,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
private void checkInvariants(IndexWriter writer) throws IOException {
_TestUtil.syncConcurrentMerges(writer);
int maxBufferedDocs = writer.getMaxBufferedDocs();
int mergeFactor = writer.getMergeFactor();
int maxMergeDocs = writer.getMaxMergeDocs();
int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs();
int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
int maxMergeDocs = ((LogMergePolicy) writer.getMergePolicy()).getMaxMergeDocs();
int ramSegmentCount = writer.getNumBufferedDocuments();
assertTrue(ramSegmentCount < maxBufferedDocs);
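
This file exercises the read side of the cutover as well: per-writer getters such as getMaxBufferedDocs() move behind writer.getConfig(), while merge knobs (merge factor, max merge docs) move onto the MergePolicy and are reached by casting, exactly as checkInvariants() above now does. A sketch of both directions, with placeholder names:

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LogMergePolicy;

final class KnobsSketch {
  static void inspect(IndexWriter writer) {
    // Merge settings now live on the policy, not the writer:
    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
    lmp.setMergeFactor(10);
    // Reading configuration back goes through the config:
    int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs(); // was writer.getMaxBufferedDocs()
    int mergeFactor = lmp.getMergeFactor();                        // was writer.getMergeFactor()
    int maxMergeDocs = lmp.getMaxMergeDocs();                      // was writer.getMaxMergeDocs()
    System.out.println(maxBufferedDocs + " " + mergeFactor + " " + maxMergeDocs);
  }
}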


@ -20,6 +20,7 @@ import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
@ -56,8 +57,8 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory merged = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
IndexWriter writer = new IndexWriter(merged, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
writer.optimize();
@ -90,12 +91,13 @@ public class TestIndexWriterMerging extends LuceneTestCase
return fail;
}
private void fillIndex(Directory dir, int start, int numDocs) throws IOException
{
private void fillIndex(Directory dir, int start, int numDocs) throws IOException {
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
for (int i = start; i < (start + numDocs); i++)
{
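
Where the old code handed an Analyzer straight to the constructor, the new code hangs it on the config via setAnalyzer, alongside the OpenMode and buffering settings, as fillIndex() above now does. A sketch of that chain; the names and the Version constant are placeholders:

import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

final class AnalyzerChainSketch {
  static IndexWriter create(Directory dir) throws IOException {
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setAnalyzer(new StandardAnalyzer(Version.LUCENE_CURRENT))
        .setOpenMode(OpenMode.CREATE)
        .setMaxBufferedDocs(2));
    // The merge factor, formerly writer.setMergeFactor(2), now belongs to the policy:
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    return writer;
  }
}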


@ -23,7 +23,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
@ -75,8 +74,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -110,8 +108,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertEquals(0, count(new Term("id", id10), r3));
assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);
@ -138,8 +135,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -147,8 +143,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@ -185,14 +180,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@ -220,8 +213,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@ -259,8 +251,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// reopen the writer to verify the delete made it to the directory
writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
IndexReader w2r1 = writer.getReader();
assertEquals(0, count(new Term("id", id10), w2r1));
@ -274,8 +265,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
int numDirs = 3;
Directory mainDir = new MockRAMDirectory();
IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter mainWriter = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
mainWriter.setInfoStream(infoStream);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
addDirThreads.launchThreads(numDirs);
@ -318,9 +308,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
this.numDirs = numDirs;
this.mainWriter = mainWriter;
addDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(addDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++) {
Document doc = createDocument(i, "addindex", 4);
writer.addDocument(doc);
@ -426,8 +414,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
*/
public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
@ -464,8 +451,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
// test whether the changes made it to the directory
writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexReader w2r1 = writer.getReader();
// insure the deletes were actually flushed to the directory
assertEquals(200, w2r1.maxDoc());
@ -504,8 +490,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public static void createIndex(Directory dir1, String indexName,
boolean multiSegment) throws IOException {
IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
w.setMergePolicy(new LogDocMergePolicy(w));
for (int i = 0; i < 100; i++) {
w.addDocument(createDocument(i, indexName, 4));
@ -539,8 +524,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testMergeWarmer() throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
writer.setInfoStream(infoStream);
// create the index
@ -552,13 +536,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Enroll warmer
MyWarmer warmer = new MyWarmer();
writer.setMergedSegmentWarmer(warmer);
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(2);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
for (int i = 0; i < 10; i++) {
writer.addDocument(createDocument(i, "test", 4));
}
((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
assertTrue(warmer.warmCount > 0);
final int count = warmer.warmCount;
@ -574,8 +557,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testAfterCommit() throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
// create the index
@ -591,7 +573,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
for (int i = 0; i < 10; i++) {
writer.addDocument(createDocument(i, "test", 4));
}
((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
IndexReader r2 = r1.reopen();
if (r2 != r1) {
@ -607,8 +589,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Make sure reader remains usable even if IndexWriter closes
public void testAfterClose() throws Exception {
Directory dir1 = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
// create the index
@ -637,10 +618,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Stress test reopen during addIndexes
public void testDuringAddIndexes() throws Exception {
Directory dir1 = new MockRAMDirectory();
final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
writer.setMergeFactor(2);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
// create the index
createIndexNoClose(false, "test", writer);
@ -715,10 +695,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Stress test reopen during add/delete
public void testDuringAddDelete() throws Exception {
Directory dir1 = new MockRAMDirectory();
final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer.setInfoStream(infoStream);
writer.setMergeFactor(2);
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
// create the index
createIndexNoClose(false, "test", writer);
@ -796,8 +775,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testExpungeDeletes() throws Throwable {
Directory dir = new MockRAMDirectory();
final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@ -821,8 +799,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testDeletesNumDocs() throws Throwable {
Directory dir = new MockRAMDirectory();
final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);


@ -17,14 +17,21 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.*;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import java.util.*;
import org.apache.lucene.util.LuceneTestCase;
/**
@ -63,10 +70,10 @@ public class TestLazyBug extends LuceneTestCase {
Directory dir = new RAMDirectory();
try {
Random r = newRandom();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
for (int d = 1; d <= NUM_DOCS; d++) {
Document doc = new Document();


@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;
@ -60,9 +59,9 @@ public class TestLazyProxSkipping extends LuceneTestCase {
int numDocs = 500;
Directory directory = new SeekCountingDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
writer.setMaxBufferedDocs(10);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
String content;
@ -118,7 +117,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
public void testSeek() throws IOException {
Directory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));


@ -44,8 +44,7 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestMultiLevelSkipList extends LuceneTestCase {
public void testSimpleSkip() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true,
IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new PayloadAnalyzer()));
Term term = new Term("test", "a");
for (int i = 0; i < 5000; i++) {
Document d1 = new Document();


@ -19,7 +19,6 @@ package org.apache.lucene.index;
import java.util.Random;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
import org.apache.lucene.store.Directory;
@ -32,13 +31,12 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
public void testIndexing() throws Exception {
Directory mainDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
IndexReader reader = writer.getReader(); // start pooling readers
reader.close();
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(10);
RunThread[] indexThreads = new RunThread[4];
for (int x=0; x < indexThreads.length; x++) {
indexThreads[x] = new RunThread(x % 2, writer);


@ -26,6 +26,7 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
@ -99,9 +100,10 @@ public class TestNorms extends LuceneTestCase {
Directory dir3 = new RAMDirectory();
createIndex(dir3);
IndexWriter iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.addIndexesNoOptimize(new Directory[]{dir1,dir2});
iw.optimize();
iw.close();
@ -117,9 +119,9 @@ public class TestNorms extends LuceneTestCase {
doTestNorms(dir3);
// now with optimize
iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setAnalyzer(anlzr).setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.optimize();
iw.close();
verifyIndex(dir3);
@ -143,11 +145,13 @@ public class TestNorms extends LuceneTestCase {
}
private void createIndex(Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir,anlzr,true, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(true);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(anlzr)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
lmp.setUseCompoundDocStore(true);
iw.close();
}
@ -185,11 +189,13 @@ public class TestNorms extends LuceneTestCase {
}
private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
IndexWriter iw = new IndexWriter(dir,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(compound);
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setAnalyzer(anlzr)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
lmp.setUseCompoundDocStore(compound);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
}
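
TestNorms rolls most of the patterns into one place: open mode, analyzer, buffering and similarity all ride on the config, while the merge factor and the compound-file and compound-doc-store flags, formerly writer.setUseCompoundFile(...), now belong to the LogMergePolicy. A combined sketch, again with placeholder names and Version constant:

import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;

final class NormsStyleSketch {
  static IndexWriter open(Directory dir) throws IOException {
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
        .setOpenMode(OpenMode.APPEND)
        .setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
        .setMaxBufferedDocs(5)
        .setSimilarity(new DefaultSimilarity()));
    LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
    lmp.setMergeFactor(3);
    lmp.setUseCompoundFile(true);     // was iw.setUseCompoundFile(true)
    lmp.setUseCompoundDocStore(true); // doc-store compounding moved here as well
    return iw;
  }
}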


@ -67,7 +67,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
Document d = new Document();
// this field will have Tf
@ -113,9 +113,9 @@ public class TestOmitTf extends LuceneTestCase {
public void testMixedMerge() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(3);
writer.setMergeFactor(2);
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxBufferedDocs(3));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
Document d = new Document();
// this field will have Tf
@ -166,9 +166,9 @@ public class TestOmitTf extends LuceneTestCase {
public void testMixedRAM() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(2);
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxBufferedDocs(10));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
Document d = new Document();
// this field will have Tf
@ -214,10 +214,12 @@ public class TestOmitTf extends LuceneTestCase {
public void testNoPrxFile() throws Throwable {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(3);
writer.setMergeFactor(2);
writer.setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxBufferedDocs(3));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setMergeFactor(2);
lmp.setUseCompoundFile(false);
lmp.setUseCompoundDocStore(false);
Document d = new Document();
Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
@ -245,10 +247,10 @@ public class TestOmitTf extends LuceneTestCase {
public void testBasic() throws Exception {
Directory dir = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(2);
writer.setSimilarity(new SimpleSimilarity());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxBufferedDocs(2)
.setSimilarity(new SimpleSimilarity()));
((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
StringBuilder sb = new StringBuilder(265);


@ -21,7 +21,6 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.MapFieldSelector;
@ -106,7 +105,7 @@ public class TestParallelReader extends LuceneTestCase {
// one document only:
Directory dir2 = new MockRAMDirectory();
IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
w2.addDocument(d3);
@ -151,13 +150,13 @@ public class TestParallelReader extends LuceneTestCase {
Directory dir2 = getDir2();
// add another document to ensure that the indexes are not optimized
IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
IndexWriter modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d = new Document();
d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
modifier.close();
modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
d = new Document();
d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
@ -170,7 +169,7 @@ public class TestParallelReader extends LuceneTestCase {
assertFalse(pr.isOptimized());
pr.close();
modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
modifier.optimize();
modifier.close();
@ -182,7 +181,7 @@ public class TestParallelReader extends LuceneTestCase {
pr.close();
modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
modifier.optimize();
modifier.close();
@ -233,7 +232,7 @@ public class TestParallelReader extends LuceneTestCase {
// Fields 1-4 indexed together:
private Searcher single() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@ -263,7 +262,7 @@ public class TestParallelReader extends LuceneTestCase {
private Directory getDir1() throws IOException {
Directory dir1 = new MockRAMDirectory();
IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@ -278,7 +277,7 @@ public class TestParallelReader extends LuceneTestCase {
private Directory getDir2() throws IOException {
Directory dir2 = new RAMDirectory();
IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
