mirror of https://github.com/apache/lucene.git
commit b51587f72d
parent 099ba4e9ea

LUCENE-2294: revert while discussions are going on about default analyzer...

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@921532 13f79535-47bb-0310-9956-ffa450edef68
@@ -76,15 +76,6 @@ API Changes
   use by external code. In addition it offers a matchExtension method which
   callers can use to query whether a certain file matches a certain extension.
   (Shai Erera via Mike McCandless)
 
-* LUCENE-2294: IndexWriter constructors have been deprecated in favor of a
-  single ctor which accepts IndexWriterConfig and a Directory. You can set all
-  the parameters related to IndexWriter on IndexWriterConfig. The different
-  setter/getter methods were deprecated as well. One should call
-  writer.getConfig().getXYZ() to query for a parameter XYZ.
-  Additionally, the setter/getter related to MergePolicy were deprecated as
-  well. One should interact with the MergePolicy directly.
-  (Shai Erera via Mike McCandless)
-
 * LUCENE-124: Add a TopTermsBoostOnlyBooleanQueryRewrite to MultiTermQuery.
   This rewrite method is similar to TopTermsScoringBooleanQueryRewrite, but
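The CHANGES entry above names the API that this commit rolls back. As a reading aid, here is a minimal sketch of the two construction styles side by side, assembled only from calls that appear in the hunks below; the class name WriterConstructionSketch is illustrative, not part of the commit:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class WriterConstructionSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();

        // Style this commit reverts away from: every setting lives on
        // IndexWriterConfig and is read back via writer.getConfig().
        IndexWriter configStyle = new IndexWriter(dir,
            new IndexWriterConfig(Version.LUCENE_CURRENT)
                .setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_CURRENT)));
        configStyle.close();

        // Style this commit restores: analyzer, create flag, and field-length
        // limit are constructor arguments; setters/getters stay on IndexWriter.
        IndexWriter ctorStyle = new IndexWriter(dir,
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
            IndexWriter.MaxFieldLength.UNLIMITED);
        ctorStyle.close();
      }
    }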
@@ -31,7 +31,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
@@ -52,7 +51,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
     super.setUp();
     dir = new RAMDirectory();
     appAnalyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(appAnalyzer));
+    IndexWriter writer = new IndexWriter(dir, appAnalyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
     int numDocs = 200;
     for (int i = 0; i < numDocs; i++) {
       Document doc = new Document();
@@ -31,7 +31,6 @@ import org.apache.lucene.analysis.tokenattributes.TermAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.BooleanClause;
@@ -60,7 +59,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
    */
   public IndexSearcher setUpSearcher(Analyzer analyzer) throws Exception {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
 
     Document doc;
     doc = new Document();
@@ -38,10 +38,7 @@ import org.apache.lucene.document.DateTools;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Searcher;
@@ -283,17 +280,15 @@ public class IndexTask extends Task {
 
     log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE);
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        Version.LUCENE_CURRENT).setAnalyzer(analyzer).setOpenMode(
-        create ? OpenMode.CREATE : OpenMode.APPEND));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundIndex);
-    lmp.setUseCompoundDocStore(useCompoundIndex);
-    lmp.setMergeFactor(mergeFactor);
+    IndexWriter writer =
+      new IndexWriter(dir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
+
+    writer.setUseCompoundFile(useCompoundIndex);
     int totalFiles = 0;
     int totalIndexed = 0;
     int totalIgnored = 0;
     try {
+      writer.setMergeFactor(mergeFactor);
 
       for (int i = 0; i < rcs.size(); i++) {
         ResourceCollection rc = rcs.elementAt(i);
@@ -21,12 +21,9 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.index.IndexDeletionPolicy;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MergeScheduler;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
-import org.apache.lucene.util.Version;
 
 import java.io.BufferedOutputStream;
 import java.io.File;
@@ -102,7 +99,7 @@ public class CreateIndexTask extends PerfTask {
 
     final double ramBuffer = config.get("ram.flush.mb",OpenIndexTask.DEFAULT_RAM_FLUSH_MB);
     final int maxBuffered = config.get("max.buffered",OpenIndexTask.DEFAULT_MAX_BUFFERED);
-    if (maxBuffered == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
+    if (maxBuffered == IndexWriter.DISABLE_AUTO_FLUSH) {
       writer.setRAMBufferSizeMB(ramBuffer);
       writer.setMaxBufferedDocs(maxBuffered);
     } else {
@@ -150,9 +147,10 @@ public class CreateIndexTask extends PerfTask {
     Config config = runData.getConfig();
 
     IndexWriter writer = new IndexWriter(runData.getDirectory(),
-        new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(
-            runData.getAnalyzer()).setOpenMode(OpenMode.CREATE)
-            .setIndexDeletionPolicy(getIndexDeletionPolicy(config)));
+                                         runData.getAnalyzer(),
+                                         true,
+                                         getIndexDeletionPolicy(config),
+                                         IndexWriter.MaxFieldLength.LIMITED);
     setIndexWriterConfig(writer, config);
     runData.setIndexWriter(writer);
     return 1;
@@ -21,9 +21,7 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexCommit;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LogMergePolicy;
-import org.apache.lucene.util.Version;
 
 import java.io.IOException;
 
@@ -41,10 +39,10 @@ import java.io.IOException;
  */
 public class OpenIndexTask extends PerfTask {
 
-  public static final int DEFAULT_MAX_BUFFERED = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
-  public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
+  public static final int DEFAULT_MAX_BUFFERED = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;
+  public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
   public static final int DEFAULT_MERGE_PFACTOR = LogMergePolicy.DEFAULT_MERGE_FACTOR;
-  public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
+  public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB;
   private String commitUserData;
 
   public OpenIndexTask(PerfRunData runData) {
@@ -63,9 +61,10 @@ public class OpenIndexTask extends PerfTask {
     }
 
     IndexWriter writer = new IndexWriter(runData.getDirectory(),
-        new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(
-            runData.getAnalyzer()).setIndexDeletionPolicy(
-            CreateIndexTask.getIndexDeletionPolicy(config)).setIndexCommit(ic));
+                                         runData.getAnalyzer(),
+                                         CreateIndexTask.getIndexDeletionPolicy(config),
+                                         IndexWriter.MaxFieldLength.UNLIMITED,
+                                         ic);
     CreateIndexTask.setIndexWriterConfig(writer, config);
     runData.setIndexWriter(writer);
     return 1;
@@ -36,15 +36,12 @@ import org.apache.lucene.benchmark.byTask.stats.TaskStats;
 import org.apache.lucene.collation.CollationKeyAnalyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermEnum;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.index.SerialMergeScheduler;
 import org.apache.lucene.index.LogDocMergePolicy;
 import org.apache.lucene.index.TermFreqVector;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.search.FieldCache.StringIndex;
 import org.apache.lucene.search.FieldCache;
@@ -99,9 +96,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
     assertEquals("TestSearchTask was supposed to be called!",279,CountingSearchTestTask.numSearches);
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
-        new IndexWriterConfig(TEST_VERSION_CURRENT)
-            .setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -187,7 +182,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
 
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
@@ -226,7 +221,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
 
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -299,7 +294,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
     assertEquals("TestSearchTask was supposed to be called!",139,CountingSearchTestTask.numSearches);
     assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
     iw.close();
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
     assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
@@ -422,9 +417,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
     benchmark = execBenchmark(algLines2);
 
     // now we should be able to open the index for write.
-    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
-        new IndexWriterConfig(TEST_VERSION_CURRENT)
-            .setOpenMode(OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
     iw.close();
 
     IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
@@ -662,9 +655,7 @@ public class TestPerfTasksLogic extends LuceneTestCase {
     // 2. execute the algorithm (required in every "logic" test)
     Benchmark benchmark = execBenchmark(algLines);
 
-    assertTrue("did not use the specified MergeScheduler",
-        ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getConfig()
-            .getMergeScheduler()).called);
+    assertTrue("did not use the specified MergeScheduler", ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getMergeScheduler()).called);
     benchmark.getRunData().getIndexWriter().close();
 
     // 3. test number of docs in the index
@@ -752,10 +743,10 @@ public class TestPerfTasksLogic extends LuceneTestCase {
     // 2. execute the algorithm (required in every "logic" test)
     Benchmark benchmark = execBenchmark(algLines);
     final IndexWriter writer = benchmark.getRunData().getIndexWriter();
-    assertEquals(2, writer.getConfig().getMaxBufferedDocs());
-    assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB());
-    assertEquals(3, ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor());
-    assertFalse(((LogMergePolicy) writer.getMergePolicy()).getUseCompoundFile());
+    assertEquals(2, writer.getMaxBufferedDocs());
+    assertEquals(IndexWriter.DISABLE_AUTO_FLUSH, (int) writer.getRAMBufferSizeMB());
+    assertEquals(3, writer.getMergeFactor());
+    assertFalse(writer.getUseCompoundFile());
     writer.close();
     Directory dir = benchmark.getRunData().getDirectory();
     IndexReader reader = IndexReader.open(dir, true);
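The TestPerfTasksLogic hunks above show the accessor half of the revert most clearly: reads of writer parameters move back from writer.getConfig() (and from the MergePolicy) onto IndexWriter itself. A minimal sketch of the two read paths, using only accessors that appear in this diff; the wrapper class and method names are illustrative:

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;

    class WriterSettingsSketch {
      // Pre-revert read path: parameters come off the config object, and
      // merge settings come off the MergePolicy directly.
      static void readViaConfig(IndexWriter writer) {
        int maxBuffered = writer.getConfig().getMaxBufferedDocs();
        double ramMB = writer.getConfig().getRAMBufferSizeMB();
        int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
        System.out.println(maxBuffered + " docs, " + ramMB + " MB, merge factor " + mergeFactor);
      }

      // Post-revert read path: the same values come from the legacy
      // accessors on IndexWriter itself.
      static void readViaWriter(IndexWriter writer) {
        int maxBuffered = writer.getMaxBufferedDocs();
        double ramMB = writer.getRAMBufferSizeMB();
        int mergeFactor = writer.getMergeFactor();
        System.out.println(maxBuffered + " docs, " + ramMB + " MB, merge factor " + mergeFactor);
      }
    }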
@@ -30,10 +30,10 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.TermFreqVector;
 import org.apache.lucene.index.TermPositionVector;
 import org.apache.lucene.index.TermVectorOffsetInfo;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
@@ -50,13 +50,13 @@ public class FieldTermStack {
   LinkedList<TermInfo> termList = new LinkedList<TermInfo>();
 
   public static void main( String[] args ) throws Exception {
-    Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+    Analyzer analyzer = new WhitespaceAnalyzer();
     QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "f", analyzer );
     Query query = parser.parse( "a x:b" );
     FieldQuery fieldQuery = new FieldQuery( query, true, false );
 
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter( dir, analyzer, MaxFieldLength.LIMITED );
     Document doc = new Document();
     doc.add( new Field( "f", "a a a b b c a b b c d e f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
     doc.add( new Field( "f", "b a b a f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
@@ -35,9 +35,8 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.PhraseQuery;
@@ -327,9 +326,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
 
   // make 1 doc with multi valued field
   protected void make1dmfIndex( Analyzer analyzer, String... values ) throws Exception {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer)
-        .setOpenMode(OpenMode.CREATE));
+    IndexWriter writer = new IndexWriter( dir, analyzer, true, MaxFieldLength.LIMITED );
     Document doc = new Document();
     for( String value: values )
       doc.add( new Field( F, value, Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
@@ -341,9 +338,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
 
   // make 1 doc with multi valued & not analyzed field
   protected void make1dmfIndexNA( String... values ) throws Exception {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
-        analyzerK));
+    IndexWriter writer = new IndexWriter( dir, analyzerK, true, MaxFieldLength.LIMITED );
     Document doc = new Document();
     for( String value: values )
       doc.add( new Field( F, value, Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
@@ -24,8 +24,7 @@ import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.Query;
 
 public class SimpleFragmentsBuilderTest extends AbstractTestCase {
@@ -119,9 +118,7 @@ public class SimpleFragmentsBuilderTest extends AbstractTestCase {
   }
 
   protected void makeUnstoredIndex() throws Exception {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
-        analyzerW));
+    IndexWriter writer = new IndexWriter( dir, analyzerW, true, MaxFieldLength.LIMITED );
     Document doc = new Document();
     doc.add( new Field( F, "aaa", Store.NO, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
     writer.addDocument( doc );
@@ -21,6 +21,7 @@ import java.io.IOException;
 
 import org.apache.lucene.analysis.Token;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.analysis.tokenattributes.TermAttribute;
@@ -32,9 +33,9 @@ import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermPositionVector;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
@@ -58,7 +59,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox jumped";
     final Directory directory = new RAMDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        new IndexWriterConfig(TEST_VERSION_CURRENT));
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -101,7 +102,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox jumped";
     final Directory directory = new RAMDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        new IndexWriterConfig(TEST_VERSION_CURRENT));
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -170,7 +171,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = new RAMDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        new IndexWriterConfig(TEST_VERSION_CURRENT));
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -212,7 +213,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = new RAMDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        new IndexWriterConfig(TEST_VERSION_CURRENT));
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, TEXT, Store.YES, Index.ANALYZED,
@@ -252,7 +253,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     final String TEXT = "the fox did not jump";
     final Directory directory = new RAMDirectory();
     final IndexWriter indexWriter = new IndexWriter(directory,
-        new IndexWriterConfig(TEST_VERSION_CURRENT));
+        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
     try {
       final Document document = new Document();
       document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -51,9 +51,8 @@ import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.BooleanQuery;
@@ -81,6 +80,7 @@ import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 
@@ -89,6 +89,8 @@ import org.w3c.dom.NodeList;
  *
  */
 public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
+  // TODO: change to CURRENT, does not work because posIncr:
+  static final Version TEST_VERSION = TEST_VERSION_CURRENT;
 
   private IndexReader reader;
   static final String FIELD_NAME = "contents";
@@ -97,7 +99,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   RAMDirectory ramDir;
   public IndexSearcher searcher = null;
   int numHighlights = 0;
-  final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
+  final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
   TopDocs hits;
 
   String[] texts = {
@@ -118,7 +120,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
   public void testQueryScorerHits() throws Exception {
     Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
+    QueryParser qp = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
     query = qp.parse("\"very long\"");
     searcher = new IndexSearcher(ramDir, true);
     TopDocs hits = searcher.search(query, 10);
@@ -148,7 +150,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
     String s1 = "I call our world Flatland, not because we call it so,";
 
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
+    QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
 
     // Verify that a query against the default field results in text being
    // highlighted
@@ -180,7 +182,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
    */
   private static String highlightField(Query query, String fieldName, String text)
       throws IOException, InvalidTokenOffsetsException {
-    TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION_CURRENT).tokenStream(fieldName, new StringReader(text));
+    TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION).tokenStream(fieldName, new StringReader(text));
     // Assuming "<B>", "</B>" used to highlight
     SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
     QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -226,7 +228,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
     String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
         + " OR " + f2c + ph2 + ")";
     Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
-    QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
+    QueryParser qp = new QueryParser(TEST_VERSION, f1, analyzer);
     Query query = qp.parse(q);
 
     QueryScorer scorer = new QueryScorer(query, f1);
@@ -676,7 +678,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
     // Need to explicitly set the QueryParser property to use TermRangeQuery
     // rather
     // than RangeFilters
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
     parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     query = parser.parse(queryString);
     doSearching(query);
@@ -1026,7 +1028,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
     String srchkey = "football";
 
     String s = "football-soccer in the euro 2004 footie competition";
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "bookid", analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION, "bookid", analyzer);
     Query query = parser.parse(srchkey);
 
     TokenStream tokenStream = analyzer.tokenStream(null, new StringReader(s));
@@ -1152,13 +1154,13 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       sb.append(stopWords.iterator().next());
     }
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords).tokenStream(
+    Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION, stopWords).tokenStream(
         "data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
     // new
     // QueryTermScorer(query));
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(100);
-    match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
+    match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
     assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
         .getMaxDocCharsToAnalyze());
 
@@ -1169,7 +1171,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       // + whitespace)
       sb.append(" ");
       sb.append(goodWord);
-      match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
+      match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
       assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
           .getMaxDocCharsToAnalyze());
     }
@@ -1190,11 +1192,11 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
     String text = "this is a text with searchterm in it";
     SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
-    Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION_CURRENT,
+    Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION,
        stopWords).tokenStream("text", new StringReader(text)), fm);
     hg.setTextFragmenter(new NullFragmenter());
     hg.setMaxDocCharsToAnalyze(36);
-    String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "text", text);
+    String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "text", text);
     assertTrue(
         "Matched text should contain remainder of text after highlighted query ",
         match.endsWith("in it"));
@@ -1211,9 +1213,9 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
     numHighlights = 0;
     // test to show how rewritten query can still be used
     searcher = new IndexSearcher(ramDir, true);
-    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
+    Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
 
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
     Query query = parser.parse("JF? or Kenned*");
     System.out.println("Searching with primitive query");
     // forget to set this and...
@@ -1324,9 +1326,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   public void testMultiSearcher() throws Exception {
     // setup index 1
     RAMDirectory ramDir1 = new RAMDirectory();
-    IndexWriter writer1 = new IndexWriter(ramDir1, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer1 = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
     Document d = new Document();
     Field f = new Field(FIELD_NAME, "multiOne", Field.Store.YES, Field.Index.ANALYZED);
     d.add(f);
@@ -1337,9 +1337,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
     // setup index 2
     RAMDirectory ramDir2 = new RAMDirectory();
-    IndexWriter writer2 = new IndexWriter(ramDir2, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer2 = new IndexWriter(ramDir2, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
     d = new Document();
     f = new Field(FIELD_NAME, "multiTwo", Field.Store.YES, Field.Index.ANALYZED);
     d.add(f);
@@ -1352,7 +1350,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
     searchers[0] = new IndexSearcher(ramDir1, true);
     searchers[1] = new IndexSearcher(ramDir2, true);
     MultiSearcher multiSearcher = new MultiSearcher(searchers);
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
+    QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
     parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     query = parser.parse("multi*");
     System.out.println("Searching for: " + query.toString(FIELD_NAME));
@@ -1386,7 +1384,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       @Override
       public void run() throws Exception {
         String docMainText = "fred is one of the people";
-        QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
+        QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
         Query query = parser.parse("fred category:people");
 
         // highlighting respects fieldnames used in query
@@ -1532,64 +1530,64 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
       Highlighter highlighter;
       String result;
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
       highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("Hi-Speed10 <B>foo</B>", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
      highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("Hi-Speed<B>10</B> foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
       highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("<B>Hi</B>-Speed10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
       highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("Hi-<B>Speed</B>10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
       highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("<B>Hi-Speed</B>10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
       highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2(), s, 3, "...");
       assertEquals("<B>Hi-Speed</B>10 foo", result);
 
       // ///////////////// same tests, just put the bigger overlapping token
       // first
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
      highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("Hi-Speed10 <B>foo</B>", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
       highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("Hi-Speed<B>10</B> foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
       highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("<B>Hi</B>-Speed10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
      highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("Hi-<B>Speed</B>10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
      highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("<B>Hi-Speed</B>10 foo", result);
 
-      query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
+      query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
       highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
       result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
       assertEquals("<B>Hi-Speed</B>10 foo", result);
@@ -1615,7 +1613,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   }
 
   private void makeIndex() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter( dir, a, MaxFieldLength.LIMITED );
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
     writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
     writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1625,7 +1623,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   }
 
   private void deleteDocument() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter( dir, a, false, MaxFieldLength.LIMITED );
     writer.deleteDocuments( new Term( "t_text1", "del" ) );
     // To see negative idf, keep comment the following line
     //writer.optimize();
@@ -1634,7 +1632,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
 
   private void searchIndex() throws IOException, ParseException, InvalidTokenOffsetsException {
     String q = "t_text1:random";
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "t_text1", a );
+    QueryParser parser = new QueryParser(TEST_VERSION, "t_text1", a );
     Query query = parser.parse( q );
     IndexSearcher searcher = new IndexSearcher( dir, true );
     // This scorer can return negative idf -> null fragment
@@ -1688,7 +1686,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   }
 
   public void doSearching(String queryString) throws Exception {
-    QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
+    QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
     parser.setEnablePositionIncrements(true);
     parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
     query = parser.parse(queryString);
@@ -1727,9 +1725,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   protected void setUp() throws Exception {
     super.setUp();
     ramDir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(ramDir, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
     for (int i = 0; i < texts.length; i++) {
       addDoc(writer, texts[i]);
     }
@@ -16,12 +16,9 @@
 
 package org.apache.lucene.store.instantiated;
 
-import java.io.IOException;
-import java.util.Arrays;
-
+import junit.framework.TestCase;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermEnum;
 import org.apache.lucene.search.IndexSearcher;
@@ -29,9 +26,11 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.LuceneTestCase;
 
-public class TestEmptyIndex extends LuceneTestCase {
+import java.util.Arrays;
+import java.io.IOException;
+
+public class TestEmptyIndex extends TestCase {
 
   public void testSearch() throws Exception {
 
@@ -61,7 +60,7 @@ public class TestEmptyIndex extends LuceneTestCase {
     // make sure a Directory acts the same
 
     Directory d = new RAMDirectory();
-    new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)).close();
+    new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
     r = IndexReader.open(d, false);
     testNorms(r);
     r.close();
@@ -94,7 +93,7 @@ public class TestEmptyIndex extends LuceneTestCase {
     // make sure a Directory acts the same
 
     Directory d = new RAMDirectory();
-    new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)).close();
+    new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
     r = IndexReader.open(d, false);
     termEnumTest(r);
     r.close();
@@ -30,7 +30,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Payload;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
@@ -61,9 +60,7 @@ public class TestIndicesEquals extends LuceneTestCase {
     RAMDirectory dir = new RAMDirectory();
 
     // create dir data
-    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     for (int i = 0; i < 20; i++) {
       Document document = new Document();
       assembleDocument(document, i);
@@ -87,9 +84,7 @@ public class TestIndicesEquals extends LuceneTestCase {
     InstantiatedIndex ii = new InstantiatedIndex();
 
     // create dir data
-    IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     for (int i = 0; i < 500; i++) {
       Document document = new Document();
       assembleDocument(document, i);
@@ -22,7 +22,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 
@@ -35,7 +35,7 @@ public class TestSerialization extends LuceneTestCase {
 
     Directory dir = new RAMDirectory();
 
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     doc.add(new Field("foo", "bar rab abr bra rba", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     doc.add(new Field("moo", "bar rab abr bra rba", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -18,11 +18,10 @@ package org.apache.lucene.store.instantiated;
 import java.io.IOException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 
@@ -33,17 +32,17 @@ public class TestUnoptimizedReaderOnConstructor extends LuceneTestCase {
 
   public void test() throws Exception {
     Directory dir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     addDocument(iw, "Hello, world!");
     addDocument(iw, "All work and no play makes jack a dull boy");
     iw.close();
 
-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     addDocument(iw, "Hello, tellus!");
     addDocument(iw, "All work and no play makes danny a dull boy");
     iw.close();
 
-    iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     addDocument(iw, "Hello, earth!");
     addDocument(iw, "All work and no play makes wendy a dull girl");
     iw.close();
@@ -42,11 +42,9 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermEnum;
 import org.apache.lucene.index.IndexReader.FieldOption;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.queryParser.MultiFieldQueryParser;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.search.Collector;
@@ -171,9 +169,7 @@ class LuceneMethods {
 
   public void optimize() throws IOException {
     //open the index writer. False: don't create a new one
-    IndexWriter indexWriter = new IndexWriter(indexName, new IndexWriterConfig(
-        Version.LUCENE_CURRENT).setAnalyzer(createAnalyzer()).setOpenMode(
-        OpenMode.APPEND));
+    IndexWriter indexWriter = new IndexWriter(indexName, createAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
     message("Starting to optimize index.");
     long start = System.currentTimeMillis();
     indexWriter.optimize();
@@ -42,7 +42,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.queryParser.ParseException;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.Collector;
@@ -411,7 +410,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
     RAMDirectory dir = new RAMDirectory();
     IndexWriter writer = null;
     try {
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+      writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.addDocument(doc);
       writer.optimize();
       return dir;
@@ -21,11 +21,11 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.OpenBitSet;
-import org.apache.lucene.util.Version;
 
 /**
  * This tool splits input index into multiple equal parts. The method employed
@@ -88,7 +88,8 @@ public class MultiPassIndexSplitter {
         }
       }
     }
-    IndexWriter w = new IndexWriter(outputs[i], new IndexWriterConfig(Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
+    IndexWriter w = new IndexWriter(outputs[i], new WhitespaceAnalyzer(),
+        true, MaxFieldLength.UNLIMITED);
     System.err.println("Writing part " + (i + 1) + " ...");
     w.addIndexes(new IndexReader[]{input});
     w.close();
@@ -17,11 +17,9 @@ package org.apache.lucene.misc;
  */
 
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.util.Version;
 
 import java.io.File;
 import java.io.IOException;
@@ -38,8 +36,7 @@ public class IndexMergeTool {
     }
     FSDirectory mergedIndex = FSDirectory.open(new File(args[0]));
 
-    IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(
-        Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
+    IndexWriter writer = new IndexWriter(mergedIndex, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
 
     Directory[] indexes = new Directory[args.length - 1];
     for (int i = 1; i < args.length; i++) {
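The MultiPassIndexSplitter and IndexMergeTool hunks above illustrate the remaining facet of the revert: OpenMode on IndexWriterConfig goes back to the boolean create argument. A minimal sketch of the correspondence, using only constructors that appear in this diff; the class name OpenModeSketch is illustrative, not part of the commit:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    class OpenModeSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();

        // Pre-revert: the open mode is an enum on IndexWriterConfig.
        new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setOpenMode(OpenMode.CREATE)).close();   // roughly create == true
        new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setOpenMode(OpenMode.APPEND)).close();   // roughly create == false

        // Post-revert: the boolean third argument selects create vs. append.
        new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            true, IndexWriter.MaxFieldLength.UNLIMITED).close();   // create
        new IndexWriter(dir, new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            false, IndexWriter.MaxFieldLength.UNLIMITED).close();  // append
      }
    }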
@@ -23,6 +23,7 @@ import java.util.Arrays;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.IndexSearcher;
@@ -57,9 +58,7 @@ public class TestFieldNormModifier extends LuceneTestCase {
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
 
     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();
@@ -18,8 +18,9 @@ package org.apache.lucene.index;
 
 import java.io.File;
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
@@ -34,7 +35,7 @@ public class TestIndexSplitter extends LuceneTestCase {
     _TestUtil.rmDir(destDir);
     destDir.mkdirs();
     FSDirectory fsDir = FSDirectory.open(dir);
-    IndexWriter iw = new IndexWriter(fsDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    IndexWriter iw = new IndexWriter(fsDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
     for (int x=0; x < 100; x++) {
       Document doc = TestIndexWriterReader.createDocument(x, "index", 5);
       iw.addDocument(doc);
@@ -16,8 +16,10 @@ package org.apache.lucene.index;
 * limitations under the License.
 */
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -30,7 +32,8 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
+        MaxFieldLength.LIMITED);
     Document doc;
     for (int i = 0; i < NUM_DOCS; i++) {
       doc = new Document();
@@ -28,7 +28,7 @@ public class TestTermVectorAccessor extends LuceneTestCase {
   public void test() throws Exception {

     Directory dir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet())));
+    IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.UNLIMITED);

     Document doc;

@@ -20,11 +20,13 @@ package org.apache.lucene.misc;
 import java.util.Calendar;
 import java.util.GregorianCalendar;

+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.CachingWrapperFilter;
@@ -56,7 +58,8 @@ public class ChainedFilterTest extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer =
+      new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     Calendar cal = new GregorianCalendar();
     cal.clear();
@@ -184,7 +187,9 @@ public class ChainedFilterTest extends LuceneTestCase {

   public void testWithCachingFilter() throws Exception {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, MaxFieldLength.LIMITED);
     writer.close();

     Searcher searcher = new IndexSearcher(dir, true);

@@ -19,13 +19,14 @@ package org.apache.lucene.misc;

 import java.io.IOException;

+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.FieldNormModifier;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.DefaultSimilarity;
 import org.apache.lucene.search.IndexSearcher;
@@ -60,7 +61,7 @@ public class TestLengthNormModifier extends LuceneTestCase {
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);

     for (int i = 0; i < NUM_DOCS; i++) {
       Document d = new Document();

@@ -24,7 +24,7 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.queryParser.QueryParser;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -113,7 +113,7 @@ public class TestComplexPhraseQuery extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     RAMDirectory rd = new RAMDirectory();
-    IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter w = new IndexWriter(rd, analyzer, MaxFieldLength.UNLIMITED);
     for (int i = 0; i < docsContent.length; i++) {
       Document doc = new Document();
       doc.add(new Field("name", docsContent[i].name, Field.Store.YES,

@@ -19,11 +19,11 @@ package org.apache.lucene.search;

 import java.io.IOException;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -36,7 +36,7 @@ public class BooleanFilterTest extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     //Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
     addDoc(writer, "admin guest", "010", "20040101","Y");

@@ -25,7 +25,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermDocs;
 import org.apache.lucene.store.RAMDirectory;
@@ -42,7 +41,7 @@ public class DuplicateFilterTest extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     //Add series of docs with filterable fields : url, text and dates flags
     addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");

@@ -25,8 +25,8 @@ import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;

@@ -39,8 +39,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(directory, analyzer,true, MaxFieldLength.UNLIMITED);

     //Add series of docs with misspelt names
     addDoc(writer, "jonathon smythe","1");

@@ -19,12 +19,13 @@ package org.apache.lucene.search;

 import java.util.HashSet;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.OpenBitSet;
@@ -53,8 +54,9 @@ public class TermsFilterTest extends LuceneTestCase {
   {
     String fieldName="field1";
     RAMDirectory rd=new RAMDirectory();
-    IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    for (int i = 0; i < 100; i++) {
+    IndexWriter w=new IndexWriter(rd,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),MaxFieldLength.UNLIMITED);
+    for (int i = 0; i < 100; i++)
+    {
       Document doc=new Document();
       int term=i*10; //terms are units of 10;
       doc.add(new Field(fieldName,""+term,Field.Store.YES,Field.Index.NOT_ANALYZED));

@@ -28,7 +28,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriter.MaxFieldLength;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
@@ -45,7 +45,8 @@ public class TestMoreLikeThis extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT),
+        true, MaxFieldLength.UNLIMITED);

     // Add series of docs with specific information for MoreLikeThis
     addDoc(writer, "lucene");

@@ -27,7 +27,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.queryParser.core.QueryNodeException;
 import org.apache.lucene.queryParser.standard.config.DefaultOperatorAttribute.Operator;
 import org.apache.lucene.search.BooleanClause;
@@ -320,7 +319,8 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
   public void testStopWordSearching() throws Exception {
     Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
    Directory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter iw = new IndexWriter(ramDir, analyzer, true,
+        IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("body", "blah the footest blah", Field.Store.NO,
         Field.Index.ANALYZED));

@@ -40,11 +40,7 @@ import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;

 /**
- * Tests multi field query parsing using the
- * {@link MultiFieldQueryParserWrapper}.
- *
- * @deprecated this tests test the deprecated MultiFieldQueryParserWrapper, so
- *             when the latter is gone, so should this test.
+ * Tests multi field query parsing using the {@link MultiFieldQueryParserWrapper}.
  */
 public class TestMultiFieldQueryParserWrapper extends LuceneTestCase {

@@ -51,7 +51,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.messages.MessageImpl;
 import org.apache.lucene.queryParser.core.QueryNodeException;
@@ -572,7 +571,8 @@ public class TestQPHelper extends LocalizedTestCase {
   public void testFarsiRangeCollating() throws Exception {

     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
+        IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,
         Field.Index.NOT_ANALYZED));
@@ -994,7 +994,8 @@ public class TestQPHelper extends LocalizedTestCase {
   public void testLocalDateFormat() throws IOException, QueryNodeException {

     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
+        IndexWriter.MaxFieldLength.LIMITED);
     addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
     addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
     iw.close();
@@ -1192,7 +1193,7 @@ public class TestQPHelper extends LocalizedTestCase {

   public void testMultiPhraseQuery() throws Exception {
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new CannedAnalyzer()));
+    IndexWriter w = new IndexWriter(dir, new CannedAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     doc.add(new Field("field", "", Field.Store.NO, Field.Index.ANALYZED));
     w.addDocument(doc);

@@ -78,9 +78,6 @@ import org.apache.lucene.util.LocalizedTestCase;
  * to use new {@link QueryParserWrapper} instead of the old query parser.
  *
  * Tests QueryParser.
- *
- * @deprecated this entire test case tests QueryParserWrapper which is
- *             deprecated. When QPW is gone, so will the test.
  */
 public class TestQueryParserWrapper extends LocalizedTestCase {

@@ -19,8 +19,8 @@ package org.apache.lucene.search.regex;

 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.search.IndexSearcher;
@@ -40,7 +40,8 @@ public class TestRegexQuery extends LuceneTestCase {
     super.setUp();
     RAMDirectory directory = new RAMDirectory();
     try {
-      IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+      IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
+          IndexWriter.MaxFieldLength.LIMITED);
       Document doc = new Document();
       doc.add(new Field(FN, "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.ANALYZED));
       writer.addDocument(doc);

@@ -19,13 +19,13 @@ package org.apache.lucene.search.regex;

 import java.io.IOException;

+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MultiSearcher;
 import org.apache.lucene.search.spans.SpanFirstQuery;
@@ -44,7 +44,7 @@ public class TestSpanRegexQuery extends LuceneTestCase {

   public void testSpanRegex() throws Exception {
     RAMDirectory directory = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     Document doc = new Document();
     // doc.add(new Field("field", "the quick brown fox jumps over the lazy dog",
     //     Field.Store.NO, Field.Index.ANALYZED));
@@ -109,15 +109,15 @@ public class TestSpanRegexQuery extends LuceneTestCase {
         Field.Index.ANALYZED_NO_NORMS));

     // creating first index writer
-    IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT),
+        true, IndexWriter.MaxFieldLength.LIMITED);
     writerA.addDocument(lDoc);
     writerA.optimize();
     writerA.close();

     // creating second index writer
-    IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT),
+        true, IndexWriter.MaxFieldLength.LIMITED);
     writerB.addDocument(lDoc2);
     writerB.optimize();
     writerB.close();

@@ -27,7 +27,6 @@ import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;

@@ -58,9 +57,8 @@ public class TestRemoteCachingWrapperFilter extends LuceneTestCase {
   private static void startServer() throws Exception {
     // construct an index
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
+        IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
     doc.add(new Field("type", "A", Field.Store.YES, Field.Index.ANALYZED));

@@ -19,9 +19,9 @@ package org.apache.lucene.search;

 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util._TestUtil;
+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.*;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;

@@ -58,7 +58,7 @@ public class TestRemoteSearchable extends LuceneTestCase {
   private static void startServer() throws Exception {
     // construct an index
     RAMDirectory indexStore = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(indexStore,new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(indexStore,new SimpleAnalyzer(TEST_VERSION_CURRENT),true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
     doc.add(new Field("other", "other test text", Field.Store.YES, Field.Index.ANALYZED));

@@ -30,12 +30,11 @@ import junit.framework.Test;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;

+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -110,9 +109,9 @@ public class TestRemoteSort extends LuceneTestCase implements Serializable {
   private Searcher getIndex (boolean even, boolean odd)
   throws IOException {
     RAMDirectory indexStore = new RAMDirectory ();
-    IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(1000);
+    IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMaxBufferedDocs(2);
+    writer.setMergeFactor(1000);
     for (int i=0; i<data.length; ++i) {
       if (((i%2)==0 && even) || ((i%2)==1 && odd)) {
         Document doc = new Document();

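The TestRemoteSort hunk above also swaps the tuning calls, not just the constructor. A short sketch of the two tuning styles, both taken from the hunk (hypothetical directory; assumes the same transitional 3.x tree):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.RAMDirectory;

    public class TuningStyles {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        // Writer-side setters, the form this commit restores:
        IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
            IndexWriter.MaxFieldLength.LIMITED);
        writer.setMaxBufferedDocs(2); // flush after every two buffered docs
        writer.setMergeFactor(1000);  // convenience setter over the merge policy
        writer.close();
        // The config style removed above reads instead:
        //   new IndexWriterConfig(Version.LUCENE_CURRENT).setMaxBufferedDocs(2)
        //   ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(1000);
      }
    }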
@@ -21,11 +21,13 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -71,7 +71,7 @@ public class TestCartesian extends LuceneTestCase {
     super.setUp();
     directory = new RAMDirectory();

-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     setUpPlotter( 2, 15);

@@ -18,10 +18,10 @@ package org.apache.lucene.spatial.tier;

 import java.io.IOException;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.QueryWrapperFilter;
@@ -44,7 +44,7 @@ public class TestDistance extends LuceneTestCase {
   protected void setUp() throws Exception {
     super.setUp();
     directory = new RAMDirectory();
-    writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
     addData(writer);

   }

@@ -20,14 +20,12 @@ package org.apache.lucene.search.spell;
 import java.io.IOException;
 import java.util.Iterator;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
@@ -36,7 +34,6 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.util.Version;

 /**
  * <p>
@@ -142,7 +139,8 @@ public class SpellChecker implements java.io.Closeable {
     synchronized (modifyCurrentIndexLock) {
       ensureOpen();
       if (!IndexReader.indexExists(spellIndexDir)) {
-          IndexWriter writer = new IndexWriter(spellIndexDir, new IndexWriterConfig(Version.LUCENE_CURRENT));
+          IndexWriter writer = new IndexWriter(spellIndexDir, null, true,
+              IndexWriter.MaxFieldLength.UNLIMITED);
           writer.close();
       }
       swapSearcher(spellIndexDir);
@@ -355,8 +353,7 @@ public class SpellChecker implements java.io.Closeable {
     synchronized (modifyCurrentIndexLock) {
       ensureOpen();
       final Directory dir = this.spellIndex;
-      final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE));
+      final IndexWriter writer = new IndexWriter(dir, null, true, IndexWriter.MaxFieldLength.UNLIMITED);
       writer.close();
       swapSearcher(dir);
     }
@@ -391,8 +388,10 @@ public class SpellChecker implements java.io.Closeable {
     synchronized (modifyCurrentIndexLock) {
       ensureOpen();
       final Directory dir = this.spellIndex;
-      final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT).setRAMBufferSizeMB(ramMB));
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(mergeFactor);
+      final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
+          IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMergeFactor(mergeFactor);
+      writer.setRAMBufferSizeMB(ramMB);

       Iterator<String> iter = dict.getWordsIterator();
       while (iter.hasNext()) {

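Note the null analyzer in the restored SpellChecker code: when a writer is opened only to create or clear an index and close() is called before any document is added, no analyzer is ever consulted. A minimal sketch of that idiom, mirroring the hunks above (hypothetical directory):

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;

    public class CreateEmptyIndex {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        // create=true wipes any existing segments; the analyzer may be null
        // because nothing is analyzed before close().
        IndexWriter writer = new IndexWriter(dir, null, true,
            IndexWriter.MaxFieldLength.UNLIMITED);
        writer.close();
      }
    }

In the config style being backed out, the same effect was spelled setOpenMode(OpenMode.CREATE) on the IndexWriterConfig.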
@@ -20,11 +20,11 @@ package org.apache.lucene.search.spell;
 import java.io.IOException;
 import java.util.Iterator;

+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -46,7 +46,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
   @Override
   protected void setUp() throws Exception {
     super.setUp();
-    IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(store, new WhitespaceAnalyzer(LuceneTestCase.TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     Document doc;

@@ -26,12 +26,12 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;

+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
@@ -54,7 +54,7 @@ public class TestSpellChecker extends LuceneTestCase {

     //create a user index
     userindex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(userindex, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

     for (int i = 0; i < 1000; i++) {
       Document doc = new Document();

@@ -19,11 +19,11 @@ package org.apache.lucene.queryParser.surround.query;

 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;

 public class SingleFieldTestDb {
   private Directory db;
@@ -35,7 +35,9 @@ public class SingleFieldTestDb {
     db = new RAMDirectory();
     docs = documents;
     fieldName = fName;
-    IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(Version.LUCENE_CURRENT));
+    Analyzer analyzer = new WhitespaceAnalyzer();
+    IndexWriter writer = new IndexWriter(db, analyzer, true,
+        IndexWriter.MaxFieldLength.LIMITED);
     for (int j = 0; j < docs.length; j++) {
       Document d = new Document();
       d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));

@@ -31,7 +31,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.queryParser.MultiFieldQueryParser;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
@@ -88,7 +87,7 @@ public class ListSearcher extends AbstractListModel {
     private ListDataListener listModelListener;

     public ListSearcher(ListModel newModel) {
-        analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+        analyzer = new WhitespaceAnalyzer();
         setListModel(newModel);
         listModelListener = new ListModelHandler();
         newModel.addListDataListener(listModelListener);
@@ -118,7 +117,7 @@ public class ListSearcher extends AbstractListModel {
         try {
             // recreate the RAMDirectory
             directory = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
+            IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

             // iterate through all rows
             for (int row=0; row < listModel.getSize(); row++){

@@ -29,7 +29,6 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.queryParser.MultiFieldQueryParser;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -116,7 +115,7 @@ public class TableSearcher extends AbstractTableModel {
      * @param tableModel The table model to decorate
      */
     public TableSearcher(TableModel tableModel) {
-        analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
+        analyzer = new WhitespaceAnalyzer();
         tableModelListener = new TableModelHandler();
         setTableModel(tableModel);
         tableModel.addTableModelListener(tableModelListener);
@@ -164,7 +163,7 @@ public class TableSearcher extends AbstractTableModel {
         try {
             // recreate the RAMDirectory
             directory = new RAMDirectory();
-            IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT).setAnalyzer(analyzer));
+            IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);

             // iterate through all rows
             for (int row=0; row < tableModel.getRowCount(); row++){

@@ -35,9 +35,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LogMergePolicy;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.Version;

@@ -248,10 +245,8 @@ public class Syns2Index
         try {

           // override the specific index if it already exists
-          IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-              Version.LUCENE_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(ana));
-          ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true); // why?
-          ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true); // why?
+          IndexWriter writer = new IndexWriter(dir, ana, true, IndexWriter.MaxFieldLength.LIMITED);
+          writer.setUseCompoundFile(true); // why?
           Iterator<String> i1 = word2Nums.keySet().iterator();
           while (i1.hasNext()) // for each word
           {

@@ -12,14 +12,12 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
-import org.apache.lucene.util.Version;
 /**
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -65,7 +63,7 @@ public class TestParser extends TestCase {
     {
       BufferedReader d = new BufferedReader(new InputStreamReader(TestParser.class.getResourceAsStream("reuters21578.txt")));
       dir=new RAMDirectory();
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_24).setAnalyzer(analyzer));
+      IndexWriter writer=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
       String line = d.readLine();
       while(line!=null)
       {

@@ -11,7 +11,6 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.RAMDirectory;
@@ -142,7 +141,7 @@ public class TestQueryTemplateManager extends LuceneTestCase {

     //Create an index
     RAMDirectory dir=new RAMDirectory();
-    IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter w=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
     for (int i = 0; i < docFieldValues.length; i++)
     {
       w.addDocument(getDocumentFromString(docFieldValues[i]));

@@ -19,8 +19,6 @@ package org.apache.lucene.demo;

 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.Version;

@@ -57,10 +55,7 @@ public class IndexFiles {

     Date start = new Date();
     try {
-      IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR),
-          new IndexWriterConfig(Version.LUCENE_CURRENT).setOpenMode(
-              OpenMode.CREATE).setAnalyzer(
-              new StandardAnalyzer(Version.LUCENE_CURRENT)));
+      IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR), new StandardAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
       System.out.println("Indexing to directory '" +INDEX_DIR+ "'...");
       indexDocs(writer, docDir);
       System.out.println("Optimizing...");

@@ -21,10 +21,8 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermEnum;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.Version;

@@ -79,9 +77,8 @@ public class IndexHTML {
       deleting = true;
       indexDocs(root, index, create);
     }
-    writer = new IndexWriter(FSDirectory.open(index), new IndexWriterConfig(
-        Version.LUCENE_CURRENT).setAnalyzer(new StandardAnalyzer(
-        Version.LUCENE_CURRENT)).setMaxFieldLength(1000000).setOpenMode(create ? OpenMode.CREATE : OpenMode.CREATE_OR_APPEND));
+    writer = new IndexWriter(FSDirectory.open(index), new StandardAnalyzer(Version.LUCENE_CURRENT), create,
+        new IndexWriter.MaxFieldLength(1000000));
     indexDocs(root, index, create); // add new docs

     System.out.println("Optimizing index...");

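IndexHTML above uses the third form of MaxFieldLength: besides the LIMITED (10,000 terms, matching DEFAULT_MAX_FIELD_LENGTH later in this diff) and UNLIMITED constants seen throughout this commit, a custom cap can be constructed. A sketch (hypothetical directory and analyzer):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.RAMDirectory;

    public class FieldLengthCaps {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        // Truncate each field after 1,000,000 terms, as IndexHTML does.
        IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
            new IndexWriter.MaxFieldLength(1000000));
        writer.close();
      }
    }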
@@ -742,7 +742,7 @@ class DirectoryReader extends IndexReader implements Cloneable {

     if (writeLock == null) {
       Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
-      if (!writeLock.obtain(IndexWriterConfig.WRITE_LOCK_TIMEOUT)) // obtain write lock
+      if (!writeLock.obtain(IndexWriter.WRITE_LOCK_TIMEOUT)) // obtain write lock
         throw new LockObtainFailedException("Index locked for write: " + writeLock);
       this.writeLock = writeLock;

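The DirectoryReader hunk only moves the timeout constant back from IndexWriterConfig to IndexWriter. For orientation, a sketch of the lock protocol that line participates in, assembled from the names in the hunk (error handling elided; the try/finally framing is illustrative, not the class's actual code):

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.Lock;
    import org.apache.lucene.store.LockObtainFailedException;
    import org.apache.lucene.store.RAMDirectory;

    public class WriteLockSketch {
      public static void main(String[] args) throws Exception {
        Directory directory = new RAMDirectory();
        Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
        // After this revert the timeout is read from IndexWriter again.
        if (!writeLock.obtain(IndexWriter.WRITE_LOCK_TIMEOUT)) {
          throw new LockObtainFailedException("Index locked for write: " + writeLock);
        }
        try {
          // ... index-mutating work would happen here ...
        } finally {
          writeLock.release();
        }
      }
    }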
@@ -138,7 +138,7 @@ final class DocumentsWriter {
   private DocFieldProcessor docFieldProcessor;

   PrintStream infoStream;
-  int maxFieldLength = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
+  int maxFieldLength = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
   Similarity similarity;

   List<String> newFiles;
@@ -223,7 +223,7 @@ final class DocumentsWriter {
     abstract DocConsumer getChain(DocumentsWriter documentsWriter);
   }

-  static final IndexingChain defaultIndexingChain = new IndexingChain() {
+  static final IndexingChain DefaultIndexingChain = new IndexingChain() {

     @Override
     DocConsumer getChain(DocumentsWriter documentsWriter) {
@@ -270,22 +270,22 @@ final class DocumentsWriter {

   // The max number of delete terms that can be buffered before
   // they must be flushed to disk.
-  private int maxBufferedDeleteTerms = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
+  private int maxBufferedDeleteTerms = IndexWriter.DEFAULT_MAX_BUFFERED_DELETE_TERMS;

   // How much RAM we can use before flushing.  This is 0 if
   // we are flushing by doc count instead.
-  private long ramBufferSize = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
+  private long ramBufferSize = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
   private long waitQueuePauseBytes = (long) (ramBufferSize*0.1);
   private long waitQueueResumeBytes = (long) (ramBufferSize*0.05);

   // If we've allocated 5% over our RAM budget, we then
   // free down to 95%
-  private long freeTrigger = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
-  private long freeLevel = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);
+  private long freeTrigger = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
+  private long freeLevel = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);

   // Flush @ this number of docs.  If ramBufferSize is
   // non-zero we will flush by RAM usage instead.
-  private int maxBufferedDocs = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
+  private int maxBufferedDocs = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;

   private int flushedDocCount; // How many docs already flushed to index

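These fields encode the flush policy: a trigger set to DISABLE_AUTO_FLUSH (-1, per the IndexWriter constants later in this diff) is off, and by default doc-count flushing is disabled in favor of the 16 MB RAM trigger. A sketch of switching triggers through the writer-side setters this commit keeps (hypothetical setup):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.RAMDirectory;

    public class FlushTriggers {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(), new SimpleAnalyzer(),
            true, IndexWriter.MaxFieldLength.LIMITED);
        // Flush by document count: disable the RAM trigger first.
        writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
        writer.setMaxBufferedDocs(1000);
        // ... or flush by RAM usage (the default policy):
        writer.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
        writer.setRAMBufferSizeMB(IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB);
        writer.close();
      }
    }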
@@ -304,7 +304,7 @@ final class DocumentsWriter {
   DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain) throws IOException {
     this.directory = directory;
     this.writer = writer;
-    this.similarity = writer.getConfig().getSimilarity();
+    this.similarity = writer.getSimilarity();
     flushedDocCount = writer.maxDoc();

     consumer = indexingChain.getChain(this);
@@ -342,8 +342,8 @@ final class DocumentsWriter {

   /** Set how much RAM we can use before flushing. */
   synchronized void setRAMBufferSizeMB(double mb) {
-    if (mb == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
-      ramBufferSize = IndexWriterConfig.DISABLE_AUTO_FLUSH;
+    if (mb == IndexWriter.DISABLE_AUTO_FLUSH) {
+      ramBufferSize = IndexWriter.DISABLE_AUTO_FLUSH;
       waitQueuePauseBytes = 4*1024*1024;
       waitQueueResumeBytes = 2*1024*1024;
     } else {
@@ -356,7 +356,7 @@ final class DocumentsWriter {
   }

   synchronized double getRAMBufferSizeMB() {
-    if (ramBufferSize == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
+    if (ramBufferSize == IndexWriter.DISABLE_AUTO_FLUSH) {
       return ramBufferSize;
     } else {
       return ramBufferSize/1024./1024.;
@@ -587,7 +587,7 @@ final class DocumentsWriter {

   synchronized private void initFlushState(boolean onlyDocStore) {
     initSegmentName(onlyDocStore);
-    flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval());
+    flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getTermIndexInterval());
   }

   /** Flush all pending docs to a new segment */
@@ -766,7 +766,7 @@ final class DocumentsWriter {
     // always get N docs when we flush by doc count, even if
     // > 1 thread is adding documents:
     if (!flushPending &&
-        maxBufferedDocs != IndexWriterConfig.DISABLE_AUTO_FLUSH
+        maxBufferedDocs != IndexWriter.DISABLE_AUTO_FLUSH
         && numDocsInRAM >= maxBufferedDocs) {
       flushPending = true;
       state.doFlushAfter = true;
@@ -928,9 +928,9 @@ final class DocumentsWriter {
   }

   synchronized boolean deletesFull() {
-    return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
+    return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
         (deletesInRAM.bytesUsed + deletesFlushed.bytesUsed + numBytesUsed) >= ramBufferSize) ||
-      (maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
+      (maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
        ((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
   }

@@ -943,9 +943,9 @@ final class DocumentsWriter {
     // too-frequent flushing of a long tail of tiny segments
     // when merges (which always apply deletes) are
    // infrequent.
-    return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
+    return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
        (deletesInRAM.bytesUsed + deletesFlushed.bytesUsed) >= ramBufferSize/2) ||
-      (maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
+      (maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
        ((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
   }

@@ -1115,7 +1115,7 @@ final class DocumentsWriter {
   }

   synchronized boolean doBalanceRAM() {
-    return ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
+    return ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
   }

   /** Does the synchronized work to finish/flush the

@@ -19,7 +19,7 @@ package org.apache.lucene.index;

 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.DocumentsWriter.IndexingChain;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
@@ -29,7 +29,6 @@ import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.ThreadInterruptedException;
-import org.apache.lucene.util.Version;

 import java.io.IOException;
 import java.io.Closeable;
@@ -180,11 +179,10 @@ public class IndexWriter implements Closeable {
  /**
   * Default value for the write lock timeout (1,000).
   * @see #setDefaultWriteLockTimeout
-  * @deprecated use {@link IndexWriterConfig#WRITE_LOCK_TIMEOUT} instead
   */
-  public static long WRITE_LOCK_TIMEOUT = IndexWriterConfig.WRITE_LOCK_TIMEOUT;
+  public static long WRITE_LOCK_TIMEOUT = 1000;

-  private long writeLockTimeout;
+  private long writeLockTimeout = WRITE_LOCK_TIMEOUT;

  /**
   * Name of the write lock in the index.
@@ -193,43 +191,36 @@ public class IndexWriter implements Closeable {

  /**
   * Value to denote a flush trigger is disabled
-  * @deprecated use {@link IndexWriterConfig#DISABLE_AUTO_FLUSH} instead
   */
-  public final static int DISABLE_AUTO_FLUSH = IndexWriterConfig.DISABLE_AUTO_FLUSH;
+  public final static int DISABLE_AUTO_FLUSH = -1;

  /**
   * Disabled by default (because IndexWriter flushes by RAM usage
   * by default). Change using {@link #setMaxBufferedDocs(int)}.
-  * @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DOCS} instead.
   */
-  public final static int DEFAULT_MAX_BUFFERED_DOCS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
+  public final static int DEFAULT_MAX_BUFFERED_DOCS = DISABLE_AUTO_FLUSH;

  /**
   * Default value is 16 MB (which means flush when buffered
   * docs consume 16 MB RAM).  Change using {@link #setRAMBufferSizeMB}.
-  * @deprecated use {@link IndexWriterConfig#DEFAULT_RAM_BUFFER_SIZE_MB} instead.
   */
-  public final static double DEFAULT_RAM_BUFFER_SIZE_MB = IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
+  public final static double DEFAULT_RAM_BUFFER_SIZE_MB = 16.0;

  /**
   * Disabled by default (because IndexWriter flushes by RAM usage
   * by default). Change using {@link #setMaxBufferedDeleteTerms(int)}.
-  * @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DELETE_TERMS} instead
   */
-  public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
+  public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;

  /**
   * Default value is 10,000. Change using {@link #setMaxFieldLength(int)}.
-  *
-  * @deprecated see {@link IndexWriterConfig}
   */
  public final static int DEFAULT_MAX_FIELD_LENGTH = 10000;

  /**
   * Default value is 128. Change using {@link #setTermIndexInterval(int)}.
-  * @deprecated use {@link IndexWriterConfig#DEFAULT_TERM_INDEX_INTERVAL} instead.
   */
-  public final static int DEFAULT_TERM_INDEX_INTERVAL = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
+  public final static int DEFAULT_TERM_INDEX_INTERVAL = 128;

  /**
   * Absolute hard maximum length for a term.  If a term
@@ -253,11 +244,10 @@ public class IndexWriter implements Closeable {
   private int messageID = -1;
   volatile private boolean hitOOM;

-  private final Directory directory;     // where this index resides
-  private final Analyzer analyzer;       // how to analyze text
+  private Directory directory;           // where this index resides
+  private Analyzer analyzer;             // how to analyze text

-  // TODO 4.0: this should be made final once the setter is out
-  private /*final*/Similarity similarity = Similarity.getDefault(); // how to normalize
+  private Similarity similarity = Similarity.getDefault(); // how to normalize

   private volatile long changeCount; // increments every time a change is completed
   private long lastCommitChangeCount; // last changeCount that was committed
@@ -280,8 +270,7 @@ public class IndexWriter implements Closeable {

   private Lock writeLock;

-  // TODO 4.0: this should be made final once the setter is out
-  private /*final*/int termIndexInterval;
+  private int termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;

   private boolean closed;
   private boolean closing;
@@ -291,8 +280,7 @@ public class IndexWriter implements Closeable {
   private HashSet<SegmentInfo> mergingSegments = new HashSet<SegmentInfo>();

   private MergePolicy mergePolicy = new LogByteSizeMergePolicy(this);
-  // TODO 4.0: this should be made final once the setter is removed
-  private /*final*/MergeScheduler mergeScheduler;
+  private MergeScheduler mergeScheduler = new ConcurrentMergeScheduler();
   private LinkedList<MergePolicy.OneMerge> pendingMerges = new LinkedList<MergePolicy.OneMerge>();
   private Set<MergePolicy.OneMerge> runningMerges = new HashSet<MergePolicy.OneMerge>();
   private List<MergePolicy.OneMerge> mergeExceptions = new ArrayList<MergePolicy.OneMerge>();
@@ -319,11 +307,7 @@ public class IndexWriter implements Closeable {
   // deletes, doing merges, and reopening near real-time
   // readers.
   private volatile boolean poolReaders;

-  // The instance that was passed to the constructor. It is saved only in order
-  // to allow users to query an IndexWriter settings.
-  private final IndexWriterConfig config;
-
   /**
    * Expert: returns a readonly reader, covering all
    * committed as well as un-committed changes to the index.
@@ -793,29 +777,19 @@ public class IndexWriter implements Closeable {
    * Otherwise an IllegalArgumentException is thrown.</p>
    *
    * @see #setUseCompoundFile(boolean)
-   * @deprecated use {@link LogMergePolicy#getUseCompoundDocStore()} and
-   *             {@link LogMergePolicy#getUseCompoundFile()} directly.
    */
   public boolean getUseCompoundFile() {
     return getLogMergePolicy().getUseCompoundFile();
   }

-  /**
-   * <p>
-   * Setting to turn on usage of a compound file. When on, multiple files for
-   * each segment are merged into a single file when a new segment is flushed.
-   * </p>
-   *
-   * <p>
-   * Note that this method is a convenience method: it just calls
-   * mergePolicy.setUseCompoundFile as long as mergePolicy is an instance of
-   * {@link LogMergePolicy}. Otherwise an IllegalArgumentException is thrown.
-   * </p>
-   *
-   * @deprecated use {@link LogMergePolicy#setUseCompoundDocStore(boolean)} and
-   *             {@link LogMergePolicy#setUseCompoundFile(boolean)} directly.
-   *             Note that this method set the given value on both, therefore
-   *             you should consider doing the same.
+  /** <p>Setting to turn on usage of a compound file. When on,
+   *  multiple files for each segment are merged into a
+   *  single file when a new segment is flushed.</p>
+   *
+   *  <p>Note that this method is a convenience method: it
+   *  just calls mergePolicy.setUseCompoundFile as long as
+   *  mergePolicy is an instance of {@link LogMergePolicy}.
+   *  Otherwise an IllegalArgumentException is thrown.</p>
   */
   public void setUseCompoundFile(boolean value) {
     getLogMergePolicy().setUseCompoundFile(value);

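The restored javadoc describes setUseCompoundFile as a convenience over the merge policy, so the two calls below are equivalent whenever the policy is a LogMergePolicy (both forms appear verbatim in the Syns2Index hunk earlier; hypothetical writer setup):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;
    import org.apache.lucene.store.RAMDirectory;

    public class CompoundFileSetting {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(), new SimpleAnalyzer(),
            true, IndexWriter.MaxFieldLength.LIMITED);
        // Convenience form kept by this revert:
        writer.setUseCompoundFile(true);
        // Direct form, which the removed @deprecated notes pointed at:
        ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
        writer.close();
      }
    }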
@@ -825,25 +799,20 @@ public class IndexWriter implements Closeable {
   /** Expert: Set the Similarity implementation used by this IndexWriter.
    *
    * @see Similarity#setDefault(Similarity)
-   * @deprecated use {@link IndexWriterConfig#setSimilarity(Similarity)} instead
    */
   public void setSimilarity(Similarity similarity) {
     ensureOpen();
     this.similarity = similarity;
     docWriter.setSimilarity(similarity);
-    // Required so config.getSimilarity returns the right value. But this will
-    // go away together with the method in 4.0.
-    config.setSimilarity(similarity);
   }

   /** Expert: Return the Similarity implementation used by this IndexWriter.
    *
    * <p>This defaults to the current value of {@link Similarity#getDefault()}.
-   * @deprecated use {@link IndexWriterConfig#getSimilarity()} instead
    */
   public Similarity getSimilarity() {
     ensureOpen();
-    return similarity;
+    return this.similarity;
   }

   /** Expert: Set the interval between indexed terms.  Large values cause less

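With the config field gone, per-writer settings are again read through the writer itself rather than through writer.getConfig(). A sketch of the accessor pair this hunk restores (hypothetical writer; DefaultSimilarity stands in for any Similarity implementation):

    import org.apache.lucene.analysis.SimpleAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.search.DefaultSimilarity;
    import org.apache.lucene.search.Similarity;
    import org.apache.lucene.store.RAMDirectory;

    public class SimilarityAccessors {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(), new SimpleAnalyzer(),
            true, IndexWriter.MaxFieldLength.LIMITED);
        writer.setSimilarity(new DefaultSimilarity());
        Similarity sim = writer.getSimilarity(); // pre-config accessor, restored here
        System.out.println(sim.getClass().getName());
        writer.close();
      }
    }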
@ -866,20 +835,15 @@ public class IndexWriter implements Closeable {
|
|||
* must be scanned for each random term access.
|
||||
*
|
||||
* @see #DEFAULT_TERM_INDEX_INTERVAL
|
||||
* @deprecated use {@link IndexWriterConfig#setTermIndexInterval(int)}
|
||||
*/
|
||||
public void setTermIndexInterval(int interval) {
|
||||
ensureOpen();
|
||||
this.termIndexInterval = interval;
|
||||
// Required so config.getSimilarity returns the right value. But this will
|
||||
// go away together with the method in 4.0.
|
||||
config.setTermIndexInterval(interval);
|
||||
}
|
||||
|
||||
/** Expert: Return the interval between indexed terms.
|
||||
*
|
||||
* @see #setTermIndexInterval(int)
|
||||
* @deprecated use {@link IndexWriterConfig#getTermIndexInterval()}
|
||||
*/
|
||||
public int getTermIndexInterval() {
|
||||
// We pass false because this method is called by SegmentMerger while we are in the process of closing
|
||||
|
@ -908,13 +872,10 @@ public class IndexWriter implements Closeable {
|
|||
* if it does not exist and <code>create</code> is
|
||||
* <code>false</code> or if there is any other low-level
|
||||
* IO error
|
||||
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
|
||||
*/
|
||||
public IndexWriter(Directory d, Analyzer a, boolean create, MaxFieldLength mfl)
|
||||
throws CorruptIndexException, LockObtainFailedException, IOException {
|
||||
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a).setOpenMode(
|
||||
create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
|
||||
mfl.getLimit()));
|
||||
init(d, a, create, null, mfl.getLimit(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -934,12 +895,10 @@ public class IndexWriter implements Closeable {
|
|||
* @throws IOException if the directory cannot be
|
||||
* read/written to or if there is any other low-level
|
||||
* IO error
|
||||
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
|
||||
*/
|
||||
public IndexWriter(Directory d, Analyzer a, MaxFieldLength mfl)
|
||||
throws CorruptIndexException, LockObtainFailedException, IOException {
|
||||
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
|
||||
.setMaxFieldLength(mfl.getLimit()));
|
||||
init(d, a, null, mfl.getLimit(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -959,13 +918,10 @@ public class IndexWriter implements Closeable {
|
|||
* @throws IOException if the directory cannot be
|
||||
* read/written to or if there is any other low-level
|
||||
* IO error
|
||||
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
|
||||
*/
|
||||
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
|
||||
throws CorruptIndexException, LockObtainFailedException, IOException {
|
||||
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
|
||||
.setMaxFieldLength(mfl.getLimit()).setIndexDeletionPolicy(
|
||||
deletionPolicy));
|
||||
init(d, a, deletionPolicy, mfl.getLimit(), null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -991,13 +947,43 @@ public class IndexWriter implements Closeable {
|
|||
* if it does not exist and <code>create</code> is
|
||||
* <code>false</code> or if there is any other low-level
|
||||
* IO error
|
||||
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
|
||||
*/
|
||||
public IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
|
||||
throws CorruptIndexException, LockObtainFailedException, IOException {
|
||||
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a).setOpenMode(
|
||||
create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
|
||||
mfl.getLimit()).setIndexDeletionPolicy(deletionPolicy));
|
||||
init(d, a, create, deletionPolicy, mfl.getLimit(), null, null);
|
||||
}
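The hunks above swap the IndexWriterConfig-based delegation back to the old init(...) path, so both construction styles appear side by side in the diff. A hedged sketch of the two, assuming a Directory dir and Analyzer analyzer are in scope:

    // Deprecated constructor restored by this revert:
    IndexWriter w1 = new IndexWriter(dir, analyzer, true,
        IndexWriter.MaxFieldLength.UNLIMITED);

    // Equivalent IndexWriterConfig form this revert backs out:
    IndexWriter w2 = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_31)
        .setAnalyzer(analyzer)
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));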

/**
* Expert: constructs an IndexWriter with a custom {@link
* IndexDeletionPolicy} and {@link IndexingChain},
* for the index in <code>d</code>.
* Text will be analyzed with <code>a</code>. If
* <code>create</code> is true, then a new, empty index
* will be created in <code>d</code>, replacing the index
* already there, if any.
*
* @param d the index directory
* @param a the analyzer to use
* @param create <code>true</code> to create the index or overwrite
* the existing one; <code>false</code> to append to the existing
* index
* @param deletionPolicy see <a href="#deletionPolicy">above</a>
* @param mfl whether or not to limit field lengths, value is in number of terms/tokens. See {@link org.apache.lucene.index.IndexWriter.MaxFieldLength}.
* @param indexingChain the {@link DocConsumer} chain to be used to
* process documents
* @param commit which commit to open
* @throws CorruptIndexException if the index is corrupt
* @throws LockObtainFailedException if another writer
* has this index open (<code>write.lock</code> could not
* be obtained)
* @throws IOException if the directory cannot be read/written to, or
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
*/
IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
init(d, a, create, deletionPolicy, mfl.getLimit(), indexingChain, commit);
}

/**

@@ -1031,74 +1017,44 @@ public class IndexWriter implements Closeable {
* if it does not exist and <code>create</code> is
* <code>false</code> or if there is any other low-level
* IO error
* @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
this(d, new IndexWriterConfig(Version.LUCENE_31).setAnalyzer(a)
.setOpenMode(OpenMode.APPEND).setMaxFieldLength(mfl.getLimit())
.setIndexDeletionPolicy(deletionPolicy).setIndexCommit(commit));
init(d, a, false, deletionPolicy, mfl.getLimit(), null, commit);
}

/**
* Constructs a new IndexWriter per the settings given in <code>conf</code>.
* Note that the passed in {@link IndexWriterConfig} is cloned, so making
* changes to it after IndexWriter has been instantiated will not affect
* IndexWriter. Additionally, calling {@link #getConfig()} and changing the
* parameters does not affect that IndexWriter instance.
* <p>
* <b>NOTE:</b> by default, {@link IndexWriterConfig#getMaxFieldLength()}
* returns {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH}. Pay attention to
* whether this setting fits your application.
*
* @param d
* the index directory. The index is either created or appended
* according to <code>conf.getOpenMode()</code>.
* @param conf
* the configuration settings according to which IndexWriter should
* be initialized.
* @throws CorruptIndexException
* if the index is corrupt
* @throws LockObtainFailedException
* if another writer has this index open (<code>write.lock</code>
* could not be obtained)
* @throws IOException
* if the directory cannot be read/written to, or if it does not
* exist and <code>conf.getOpenMode()</code> is
* <code>OpenMode.APPEND</code> or if there is any other low-level
* IO error
*/
public IndexWriter(Directory d, IndexWriterConfig conf)
throws CorruptIndexException, LockObtainFailedException, IOException {
config = (IndexWriterConfig) conf.clone();
directory = d;
analyzer = conf.getAnalyzer();
setMessageID(defaultInfoStream);
maxFieldLength = conf.getMaxFieldLength();
termIndexInterval = conf.getTermIndexInterval();
writeLockTimeout = conf.getWriteLockTimeout();
similarity = conf.getSimilarity();
mergeScheduler = conf.getMergeScheduler();

OpenMode mode = conf.getOpenMode();
boolean create;
if (mode == OpenMode.CREATE) {
create = true;
} else if (mode == OpenMode.APPEND) {
create = false;
private void init(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy,
int maxFieldLength, IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
if (IndexReader.indexExists(d)) {
init(d, a, false, deletionPolicy, maxFieldLength, indexingChain, commit);
} else {
// CREATE_OR_APPEND - create only if an index does not exist
create = !IndexReader.indexExists(directory);
init(d, a, true, deletionPolicy, maxFieldLength, indexingChain, commit);
}
}

private void init(Directory d, Analyzer a, final boolean create,
IndexDeletionPolicy deletionPolicy, int maxFieldLength,
IndexingChain indexingChain, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {

directory = d;
analyzer = a;
setMessageID(defaultInfoStream);
this.maxFieldLength = maxFieldLength;

if (indexingChain == null)
indexingChain = DocumentsWriter.DefaultIndexingChain;

if (create) {
// Clear the write lock in case it's leftover:
directory.clearLock(WRITE_LOCK_NAME);
}

writeLock = directory.makeLock(WRITE_LOCK_NAME);
Lock writeLock = directory.makeLock(WRITE_LOCK_NAME);
if (!writeLock.obtain(writeLockTimeout)) // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
this.writeLock = writeLock; // save it

try {
if (create) {

@@ -1129,7 +1085,6 @@ public class IndexWriter implements Closeable {
} else {
segmentInfos.read(directory);

IndexCommit commit = conf.getIndexCommit();
if (commit != null) {
// Swap out all segments, but, keep metadata in
// SegmentInfos, like version & generation, to

@@ -1153,14 +1108,14 @@ public class IndexWriter implements Closeable {

setRollbackSegmentInfos(segmentInfos);

docWriter = new DocumentsWriter(directory, this, conf.getIndexingChain());
docWriter = new DocumentsWriter(directory, this, indexingChain);
docWriter.setInfoStream(infoStream);
docWriter.setMaxFieldLength(maxFieldLength);

// Default deleter (for backwards compatibility) is
// KeepOnlyLastCommitDeleter:
deleter = new IndexFileDeleter(directory,
conf.getIndexDeletionPolicy(),
deletionPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : deletionPolicy,
segmentInfos, infoStream, docWriter);

if (deleter.startingCommitDeleted)

@@ -1170,22 +1125,20 @@ public class IndexWriter implements Closeable {
// segments_N file.
changeCount++;

docWriter.setMaxBufferedDeleteTerms(conf.getMaxBufferedDeleteTerms());
docWriter.setRAMBufferSizeMB(conf.getRAMBufferSizeMB());
docWriter.setMaxBufferedDocs(conf.getMaxBufferedDocs());
pushMaxBufferedDocs();

if (infoStream != null) {
message("init: create=" + create);
messageState();
}

} catch (IOException e) {
writeLock.release();
writeLock = null;
this.writeLock.release();
this.writeLock = null;
throw e;
}
}

private synchronized void setRollbackSegmentInfos(SegmentInfos infos) {
rollbackSegmentInfos = (SegmentInfos) infos.clone();
assert !rollbackSegmentInfos.hasExternalSegments(directory);

@@ -1195,19 +1148,6 @@ public class IndexWriter implements Closeable {
rollbackSegments.put(rollbackSegmentInfos.info(i), Integer.valueOf(i));
}

/**
* Returns the {@link IndexWriterConfig} that was passed to
* {@link #IndexWriter(Directory, IndexWriterConfig)}. This allows querying
* IndexWriter's settings.
* <p>
* <b>NOTE:</b> setting any parameter on the returned instance has no effect
* on the IndexWriter instance. If you need to change those settings after
* IndexWriter has been created, you need to instantiate a new IndexWriter.
*/
public IndexWriterConfig getConfig() {
return config;
}
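The getConfig() accessor removed above is the query side of the reverted API: after construction, settings are read back through the cloned config rather than through individual writer getters. A small hedged sketch, assuming a Directory dir and an IndexWriterConfig conf are already built:

    IndexWriter writer = new IndexWriter(dir, conf);
    int interval = writer.getConfig().getTermIndexInterval();
    double ramMB = writer.getConfig().getRAMBufferSizeMB();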
/**
* Expert: set the merge policy used by this writer.
*/

@@ -1235,7 +1175,6 @@ public class IndexWriter implements Closeable {

/**
* Expert: set the merge scheduler used by this writer.
* @deprecated use {@link IndexWriterConfig#setMergeScheduler(MergeScheduler)} instead
*/
synchronized public void setMergeScheduler(MergeScheduler mergeScheduler) throws CorruptIndexException, IOException {
ensureOpen();

@@ -1249,16 +1188,12 @@ public class IndexWriter implements Closeable {
this.mergeScheduler = mergeScheduler;
if (infoStream != null)
message("setMergeScheduler " + mergeScheduler);
// Required so config.getMergeScheduler returns the right value. But this will
// go away together with the method in 4.0.
config.setMergeScheduler(mergeScheduler);
}

/**
* Expert: returns the current MergeScheduler in use by this
* Expert: returns the current MergePolicy in use by this
* writer.
* @see #setMergeScheduler(MergeScheduler)
* @deprecated use {@link IndexWriterConfig#getMergeScheduler()} instead
* @see #setMergePolicy
*/
public MergeScheduler getMergeScheduler() {
ensureOpen();

@@ -1284,7 +1219,6 @@ public class IndexWriter implements Closeable {
* LogByteSizeMergePolicy}) also allows you to set this
* limit by net size (in MB) of the segment, using {@link
* LogByteSizeMergePolicy#setMaxMergeMB}.</p>
* @deprecated use {@link LogMergePolicy#setMaxMergeDocs(int)} directly.
*/
public void setMaxMergeDocs(int maxMergeDocs) {
getLogMergePolicy().setMaxMergeDocs(maxMergeDocs);

@@ -1300,7 +1234,6 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* @see #setMaxMergeDocs
* @deprecated use {@link LogMergePolicy#getMaxMergeDocs()} directly.
*/
public int getMaxMergeDocs() {
return getLogMergePolicy().getMaxMergeDocs();

@@ -1319,7 +1252,6 @@ public class IndexWriter implements Closeable {
* is your memory, but you should anticipate an OutOfMemoryError.<p/>
* By default, no more than {@link #DEFAULT_MAX_FIELD_LENGTH} terms
* will be indexed for a field.
* @deprecated use {@link IndexWriterConfig#setMaxFieldLength(int)} instead
*/
public void setMaxFieldLength(int maxFieldLength) {
ensureOpen();

@@ -1327,16 +1259,12 @@ public class IndexWriter implements Closeable {
docWriter.setMaxFieldLength(maxFieldLength);
if (infoStream != null)
message("setMaxFieldLength " + maxFieldLength);
// Required so config.getMaxFieldLength returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxFieldLength(maxFieldLength);
}

/**
* Returns the maximum number of terms that will be
* indexed for a single field in a document.
* @see #setMaxFieldLength
* @deprecated use {@link IndexWriterConfig#getMaxFieldLength()} instead
*/
public int getMaxFieldLength() {
ensureOpen();

@@ -1361,7 +1289,6 @@ public class IndexWriter implements Closeable {
* enabled but smaller than 2, or it disables maxBufferedDocs
* when ramBufferSize is already disabled
* @see #setRAMBufferSizeMB
* @deprecated use {@link IndexWriterConfig#setMaxBufferedDocs(int)} instead.
*/
public void setMaxBufferedDocs(int maxBufferedDocs) {
ensureOpen();

@@ -1376,9 +1303,6 @@ public class IndexWriter implements Closeable {
pushMaxBufferedDocs();
if (infoStream != null)
message("setMaxBufferedDocs " + maxBufferedDocs);
// Required so config.getMaxBufferedDocs returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxBufferedDocs(maxBufferedDocs);
}

/**

@@ -1405,7 +1329,6 @@ public class IndexWriter implements Closeable {
* Returns the number of buffered added documents that will
* trigger a flush if enabled.
* @see #setMaxBufferedDocs
* @deprecated use {@link IndexWriterConfig#getMaxBufferedDocs()} instead.
*/
public int getMaxBufferedDocs() {
ensureOpen();

@@ -1449,7 +1372,6 @@ public class IndexWriter implements Closeable {
* @throws IllegalArgumentException if ramBufferSize is
* enabled but non-positive, or it disables ramBufferSize
* when maxBufferedDocs is already disabled
* @deprecated use {@link IndexWriterConfig#setRAMBufferSizeMB(double)} instead.
*/
public void setRAMBufferSizeMB(double mb) {
if (mb > 2048.0) {

@@ -1464,14 +1386,10 @@ public class IndexWriter implements Closeable {
docWriter.setRAMBufferSizeMB(mb);
if (infoStream != null)
message("setRAMBufferSizeMB " + mb);
// Required so config.getRAMBufferSizeMB returns the right value. But this will
// go away together with the method in 4.0.
config.setRAMBufferSizeMB(mb);
}

/**
* Returns the value set by {@link #setRAMBufferSizeMB} if enabled.
* @deprecated use {@link IndexWriterConfig#getRAMBufferSizeMB()} instead.
*/
public double getRAMBufferSizeMB() {
return docWriter.getRAMBufferSizeMB();

@@ -1488,7 +1406,6 @@ public class IndexWriter implements Closeable {
* @throws IllegalArgumentException if maxBufferedDeleteTerms
* is enabled but smaller than 1
* @see #setRAMBufferSizeMB
* @deprecated use {@link IndexWriterConfig#setMaxBufferedDeleteTerms(int)} instead.
*/
public void setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
ensureOpen();

@@ -1499,16 +1416,12 @@ public class IndexWriter implements Closeable {
docWriter.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
if (infoStream != null)
message("setMaxBufferedDeleteTerms " + maxBufferedDeleteTerms);
// Required so config.getMaxBufferedDeleteTerms returns the right value. But this will
// go away together with the method in 4.0.
config.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
}

/**
* Returns the number of buffered deleted terms that will
* trigger a flush if enabled.
* @see #setMaxBufferedDeleteTerms
* @deprecated use {@link IndexWriterConfig#getMaxBufferedDeleteTerms()} instead
*/
public int getMaxBufferedDeleteTerms() {
ensureOpen();

@@ -1529,7 +1442,6 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* <p>This must never be less than 2. The default value is 10.
* @deprecated use {@link LogMergePolicy#setMergeFactor(int)} directly.
*/
public void setMergeFactor(int mergeFactor) {
getLogMergePolicy().setMergeFactor(mergeFactor);

@@ -1546,7 +1458,6 @@ public class IndexWriter implements Closeable {
* Otherwise an IllegalArgumentException is thrown.</p>
*
* @see #setMergeFactor
* @deprecated use {@link LogMergePolicy#getMergeFactor()} directly.
*/
public int getMergeFactor() {
return getLogMergePolicy().getMergeFactor();

@@ -1583,11 +1494,15 @@ public class IndexWriter implements Closeable {
}

private void messageState() {
message("\ndir=" + directory + "\n" +
"mergePolicy=" + mergePolicy + "\n" +
"index=" + segString() + "\n" +
"version=" + Constants.LUCENE_VERSION + "\n" +
config.toString());
message("setInfoStream: dir=" + directory +
" mergePolicy=" + mergePolicy +
" mergeScheduler=" + mergeScheduler +
" ramBufferSizeMB=" + docWriter.getRAMBufferSizeMB() +
" maxBufferedDocs=" + docWriter.getMaxBufferedDocs() +
" maxBufferedDeleteTerms=" + docWriter.getMaxBufferedDeleteTerms() +
" maxFieldLength=" + maxFieldLength +
" index=" + segString() +
" version=" + Constants.LUCENE_VERSION);
}

/**

@@ -1607,20 +1522,15 @@ public class IndexWriter implements Closeable {
/**
* Sets the maximum time to wait for a write lock (in milliseconds) for this instance of IndexWriter.
* @see #setDefaultWriteLockTimeout to change the default value for all instances of IndexWriter.
* @deprecated use {@link IndexWriterConfig#setWriteLockTimeout(long)} instead
*/
public void setWriteLockTimeout(long writeLockTimeout) {
ensureOpen();
this.writeLockTimeout = writeLockTimeout;
// Required so config.getWriteLockTimeout returns the right value. But this will
// go away together with the method in 4.0.
config.setWriteLockTimeout(writeLockTimeout);
}

/**
* Returns allowed timeout when acquiring the write lock.
* @see #setWriteLockTimeout
* @deprecated use {@link IndexWriterConfig#getWriteLockTimeout()}
*/
public long getWriteLockTimeout() {
ensureOpen();

@@ -1630,20 +1540,18 @@ public class IndexWriter implements Closeable {
/**
* Sets the default (for any instance of IndexWriter) maximum time to wait for a write lock (in
* milliseconds).
* @deprecated use {@link IndexWriterConfig#setDefaultWriteLockTimeout(long)} instead
*/
public static void setDefaultWriteLockTimeout(long writeLockTimeout) {
IndexWriterConfig.setDefaultWriteLockTimeout(writeLockTimeout);
IndexWriter.WRITE_LOCK_TIMEOUT = writeLockTimeout;
}

/**
* Returns default write lock timeout for newly
* instantiated IndexWriters.
* @see #setDefaultWriteLockTimeout
* @deprecated use {@link IndexWriterConfig#getDefaultWriteLockTimeout()} instead
*/
public static long getDefaultWriteLockTimeout() {
return IndexWriterConfig.getDefaultWriteLockTimeout();
return IndexWriter.WRITE_LOCK_TIMEOUT;
}

/**

@@ -4877,13 +4785,9 @@ public class IndexWriter implements Closeable {
}

/**
* Specifies maximum field length (in number of tokens/terms) in
* {@link IndexWriter} constructors. {@link #setMaxFieldLength(int)} overrides
* the value set by the constructor.
*
* @deprecated use {@link IndexWriterConfig} and pass
* {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH} or your own
* value.
* Specifies maximum field length (in number of tokens/terms) in {@link IndexWriter} constructors.
* {@link #setMaxFieldLength(int)} overrides the value set by
* the constructor.
*/
public static final class MaxFieldLength {

@@ -1,518 +0,0 @@
package org.apache.lucene.index;

/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.DocumentsWriter.IndexingChain;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.util.Version;

/**
* Holds all the configuration of {@link IndexWriter}. This object is only used
* while constructing a new IndexWriter. Those settings cannot be changed
* afterwards, except by instantiating a new IndexWriter.
* <p>
* All setter methods return {@link IndexWriterConfig} to allow chaining
* settings conveniently. Thus someone can do:
*
* <pre>
* IndexWriterConfig conf = new IndexWriterConfig(analyzer);
* conf.setter1().setter2();
* </pre>
*
* @since 3.1
*/
public final class IndexWriterConfig implements Cloneable {

public static final int UNLIMITED_FIELD_LENGTH = Integer.MAX_VALUE;

/**
* Specifies the open mode for {@link IndexWriter}:
* <ul>
* {@link #CREATE} - creates a new index or overwrites an existing one.
* {@link #CREATE_OR_APPEND} - creates a new index if one does not exist,
* otherwise it opens the index and documents will be appended.
* {@link #APPEND} - opens an existing index.
* </ul>
*/
public static enum OpenMode { CREATE, APPEND, CREATE_OR_APPEND }
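The three open modes deleted here map onto the boolean create flag of the restored constructors. A hedged sketch of selecting a mode on the reverted API, again assuming dir and analyzer in scope:

    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31)
        .setAnalyzer(analyzer)
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    IndexWriter writer = new IndexWriter(dir, conf);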
/** Default value is 128. Change using {@link #setTermIndexInterval(int)}. */
public static final int DEFAULT_TERM_INDEX_INTERVAL = 128;

/** Denotes a flush trigger is disabled. */
public final static int DISABLE_AUTO_FLUSH = -1;

/** Disabled by default (because IndexWriter flushes by RAM usage by default). */
public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;

/** Disabled by default (because IndexWriter flushes by RAM usage by default). */
public final static int DEFAULT_MAX_BUFFERED_DOCS = DISABLE_AUTO_FLUSH;

/**
* Default value is 16 MB (which means flush when buffered docs consume
* approximately 16 MB RAM).
*/
public final static double DEFAULT_RAM_BUFFER_SIZE_MB = 16.0;

/**
* Default value for the write lock timeout (1,000 ms).
*
* @see #setDefaultWriteLockTimeout(long)
*/
public static long WRITE_LOCK_TIMEOUT = 1000;

/**
* Sets the default (for any instance) maximum time to wait for a write lock
* (in milliseconds).
*/
public static void setDefaultWriteLockTimeout(long writeLockTimeout) {
WRITE_LOCK_TIMEOUT = writeLockTimeout;
}

/**
* Returns the default write lock timeout for newly instantiated
* IndexWriterConfigs.
*
* @see #setDefaultWriteLockTimeout(long)
*/
public static long getDefaultWriteLockTimeout() {
return WRITE_LOCK_TIMEOUT;
}

private Analyzer analyzer;
private IndexDeletionPolicy delPolicy;
private IndexCommit commit;
private OpenMode openMode;
private int maxFieldLength;
private Similarity similarity;
private int termIndexInterval;
private MergeScheduler mergeScheduler;
private long writeLockTimeout;
private int maxBufferedDeleteTerms;
private double ramBufferSizeMB;
private int maxBufferedDocs;
private IndexingChain indexingChain;

// required for clone
private Version matchVersion;

/**
* Creates a new config with defaults that match the specified
* {@link Version}. {@link Version} is a placeholder for future changes. The
* default settings are relevant to 3.1 and before. In the future, if
* different settings will apply to different versions, they will be
* documented here.
*/
public IndexWriterConfig(Version matchVersion) {
this.matchVersion = matchVersion;
analyzer = new WhitespaceAnalyzer(matchVersion);
delPolicy = new KeepOnlyLastCommitDeletionPolicy();
commit = null;
openMode = OpenMode.CREATE_OR_APPEND;
maxFieldLength = UNLIMITED_FIELD_LENGTH;
similarity = Similarity.getDefault();
termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;
mergeScheduler = new ConcurrentMergeScheduler();
writeLockTimeout = WRITE_LOCK_TIMEOUT;
maxBufferedDeleteTerms = DEFAULT_MAX_BUFFERED_DELETE_TERMS;
ramBufferSizeMB = DEFAULT_RAM_BUFFER_SIZE_MB;
maxBufferedDocs = DEFAULT_MAX_BUFFERED_DOCS;
indexingChain = DocumentsWriter.defaultIndexingChain;
}

@Override
public Object clone() {
// Shallow clone is the only thing that's possible, since parameters like
// analyzer, index commit etc. do not implement Cloneable.
try {
return super.clone();
} catch (CloneNotSupportedException e) {
// should not happen
throw new RuntimeException(e);
}
}

/**
* Sets the default {@link Analyzer} to be used when indexing documents. The
* default {@link WhitespaceAnalyzer} is set for convenience (e.g. for test
* purposes or when the analyzer used does not make a difference) and it's
* recommended to override the default setting if you care about the tokens
* that end up in your index.
* <p>
* <b>NOTE:</b> the analyzer cannot be null. If <code>null</code> is passed,
* the analyzer will be set to the default.
*/
public IndexWriterConfig setAnalyzer(Analyzer analyzer) {
this.analyzer = analyzer == null ? new WhitespaceAnalyzer(matchVersion) : analyzer;
return this;
}

/** Returns the default analyzer to use for indexing documents. */
public Analyzer getAnalyzer() {
return analyzer;
}

/** Specifies the {@link OpenMode} of the index. */
public IndexWriterConfig setOpenMode(OpenMode openMode) {
this.openMode = openMode;
return this;
}

/** Returns the {@link OpenMode} set by {@link #setOpenMode(OpenMode)}. */
public OpenMode getOpenMode() {
return openMode;
}

/**
* Expert: allows an optional {@link IndexDeletionPolicy} implementation to be
* specified. You can use this to control when prior commits are deleted from
* the index. The default policy is {@link KeepOnlyLastCommitDeletionPolicy}
* which removes all prior commits as soon as a new commit is done (this
* matches behavior before 2.2). Creating your own policy can allow you to
* explicitly keep previous "point in time" commits alive in the index for
* some time, to allow readers to refresh to the new commit without having the
* old commit deleted out from under them. This is necessary on filesystems
* like NFS that do not support "delete on last close" semantics, which
* Lucene's "point in time" search normally relies on.
* <p>
* <b>NOTE:</b> the deletion policy cannot be null. If <code>null</code> is
* passed, the deletion policy will be set to the default.
*/
public IndexWriterConfig setIndexDeletionPolicy(IndexDeletionPolicy delPolicy) {
this.delPolicy = delPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : delPolicy;
return this;
}

/**
* Returns the {@link IndexDeletionPolicy} specified in
* {@link #setIndexDeletionPolicy(IndexDeletionPolicy)} or the default
* {@link KeepOnlyLastCommitDeletionPolicy}.
*/
public IndexDeletionPolicy getIndexDeletionPolicy() {
return delPolicy;
}

/**
* The maximum number of terms that will be indexed for a single field in a
* document. This limits the amount of memory required for indexing, so that
* collections with very large files will not crash the indexing process by
* running out of memory. This setting refers to the number of running terms,
* not to the number of different terms.
* <p>
* <b>NOTE:</b> this silently truncates large documents, excluding from the
* index all terms that occur further in the document. If you know your source
* documents are large, be sure to set this value high enough to accommodate
* the expected size. If you set it to {@link #UNLIMITED_FIELD_LENGTH}, then
* the only limit is your memory, but you should anticipate an
* OutOfMemoryError.
* <p>
* By default it is set to {@link #UNLIMITED_FIELD_LENGTH}.
*/
public IndexWriterConfig setMaxFieldLength(int maxFieldLength) {
this.maxFieldLength = maxFieldLength;
return this;
}

/**
* Returns the maximum number of terms that will be indexed for a single field
* in a document.
*
* @see #setMaxFieldLength(int)
*/
public int getMaxFieldLength() {
return maxFieldLength;
}

/**
* Expert: allows opening a certain commit point. The default is null, which
* opens the latest commit point.
*/
public IndexWriterConfig setIndexCommit(IndexCommit commit) {
this.commit = commit;
return this;
}

/**
* Returns the {@link IndexCommit} as specified in
* {@link #setIndexCommit(IndexCommit)} or the default, <code>null</code>,
* which specifies to open the latest index commit point.
*/
public IndexCommit getIndexCommit() {
return commit;
}

/**
* Expert: set the {@link Similarity} implementation used by this IndexWriter.
* <p>
* <b>NOTE:</b> the similarity cannot be null. If <code>null</code> is passed,
* the similarity will be set to the default.
*
* @see Similarity#setDefault(Similarity)
*/
public IndexWriterConfig setSimilarity(Similarity similarity) {
this.similarity = similarity == null ? Similarity.getDefault() : similarity;
return this;
}

/**
* Expert: returns the {@link Similarity} implementation used by this
* IndexWriter. This defaults to the current value of
* {@link Similarity#getDefault()}.
*/
public Similarity getSimilarity() {
return similarity;
}

/**
* Expert: set the interval between indexed terms. Large values cause less
* memory to be used by IndexReader, but slow random-access to terms. Small
* values cause more memory to be used by an IndexReader, and speed
* random-access to terms.
* <p>
* This parameter determines the amount of computation required per query
* term, regardless of the number of documents that contain that term. In
* particular, it is the maximum number of other terms that must be scanned
* before a term is located and its frequency and position information may be
* processed. In a large index with user-entered query terms, query processing
* time is likely to be dominated not by term lookup but rather by the
* processing of frequency and positional data. In a small index or when many
* uncommon query terms are generated (e.g., by wildcard queries) term lookup
* may become a dominant cost.
* <p>
* In particular, <code>numUniqueTerms/interval</code> terms are read into
* memory by an IndexReader, and, on average, <code>interval/2</code> terms
* must be scanned for each random term access.
*
* @see #DEFAULT_TERM_INDEX_INTERVAL
*/
public IndexWriterConfig setTermIndexInterval(int interval) {
this.termIndexInterval = interval;
return this;
}

/**
* Returns the interval between indexed terms.
*
* @see #setTermIndexInterval(int)
*/
public int getTermIndexInterval() {
return termIndexInterval;
}
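The numUniqueTerms/interval arithmetic in the javadoc above is easy to make concrete. An illustrative calculation (the corpus size is assumed, not taken from the source):

    int numUniqueTerms = 12800000;             // assumed corpus statistic
    int interval = 128;                        // DEFAULT_TERM_INDEX_INTERVAL
    int inMemory = numUniqueTerms / interval;  // 100000 index terms held by an IndexReader
    int avgScan = interval / 2;                // ~64 terms scanned per random lookup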
/**
* Expert: sets the merge scheduler used by this writer. The default is
* {@link ConcurrentMergeScheduler}.
* <p>
* <b>NOTE:</b> the merge scheduler cannot be null. If <code>null</code> is
* passed, the merge scheduler will be set to the default.
*/
public IndexWriterConfig setMergeScheduler(MergeScheduler mergeScheduler) {
this.mergeScheduler = mergeScheduler == null ? new ConcurrentMergeScheduler() : mergeScheduler;
return this;
}

/**
* Returns the {@link MergeScheduler} that was set by
* {@link #setMergeScheduler(MergeScheduler)}.
*/
public MergeScheduler getMergeScheduler() {
return mergeScheduler;
}

/**
* Sets the maximum time to wait for a write lock (in milliseconds) for this
* instance. You can change the default value for all instances by calling
* {@link #setDefaultWriteLockTimeout(long)}.
*/
public IndexWriterConfig setWriteLockTimeout(long writeLockTimeout) {
this.writeLockTimeout = writeLockTimeout;
return this;
}

/**
* Returns allowed timeout when acquiring the write lock.
*
* @see #setWriteLockTimeout(long)
*/
public long getWriteLockTimeout() {
return writeLockTimeout;
}

/**
* Determines the minimal number of delete terms required before the buffered
* in-memory delete terms are applied and flushed. If there are documents
* buffered in memory at the time, they are merged and a new segment is
* created.
*
* <p>Disabled by default (writer flushes by RAM usage).
*
* @throws IllegalArgumentException if maxBufferedDeleteTerms
* is enabled but smaller than 1
* @see #setRAMBufferSizeMB
*/
public IndexWriterConfig setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
if (maxBufferedDeleteTerms != DISABLE_AUTO_FLUSH
&& maxBufferedDeleteTerms < 1)
throw new IllegalArgumentException(
"maxBufferedDeleteTerms must at least be 1 when enabled");
this.maxBufferedDeleteTerms = maxBufferedDeleteTerms;
return this;
}

/**
* Returns the number of buffered deleted terms that will trigger a flush if
* enabled.
*
* @see #setMaxBufferedDeleteTerms(int)
*/
public int getMaxBufferedDeleteTerms() {
return maxBufferedDeleteTerms;
}

/**
* Determines the amount of RAM that may be used for buffering added documents
* and deletions before they are flushed to the Directory. Generally for
* faster indexing performance it's best to flush by RAM usage instead of
* document count and use as large a RAM buffer as you can.
*
* <p>
* When this is set, the writer will flush whenever buffered documents and
* deletions use this much RAM. Pass in {@link #DISABLE_AUTO_FLUSH} to prevent
* triggering a flush due to RAM usage. Note that if flushing by document
* count is also enabled, then the flush will be triggered by whichever comes
* first.
*
* <p>
* <b>NOTE</b>: the accounting of RAM usage for pending deletions is only
* approximate. Specifically, if you delete by Query, Lucene currently has no
* way to measure the RAM usage of individual Queries so the accounting will
* under-estimate and you should compensate by either calling commit()
* periodically yourself, or by using {@link #setMaxBufferedDeleteTerms(int)}
* to flush by count instead of RAM usage (each buffered delete Query counts
* as one).
*
* <p>
* <b>NOTE</b>: because IndexWriter uses <code>int</code>s when managing its
* internal storage, the absolute maximum value for this setting is somewhat
* less than 2048 MB. The precise limit depends on various factors, such as
* how large your documents are, how many fields have norms, etc., so it's
* best to set this value comfortably under 2048.
*
* <p>
* The default value is {@link #DEFAULT_RAM_BUFFER_SIZE_MB}.
*
* @throws IllegalArgumentException
* if ramBufferSize is enabled but non-positive, or it disables
* ramBufferSize when maxBufferedDocs is already disabled
*/
public IndexWriterConfig setRAMBufferSizeMB(double ramBufferSizeMB) {
if (ramBufferSizeMB > 2048.0) {
throw new IllegalArgumentException("ramBufferSize " + ramBufferSizeMB
+ " is too large; should be comfortably less than 2048");
}
if (ramBufferSizeMB != DISABLE_AUTO_FLUSH && ramBufferSizeMB <= 0.0)
throw new IllegalArgumentException(
"ramBufferSize should be > 0.0 MB when enabled");
if (ramBufferSizeMB == DISABLE_AUTO_FLUSH && maxBufferedDocs == DISABLE_AUTO_FLUSH)
throw new IllegalArgumentException(
"at least one of ramBufferSize and maxBufferedDocs must be enabled");
this.ramBufferSizeMB = ramBufferSizeMB;
return this;
}

/** Returns the value set by {@link #setRAMBufferSizeMB(double)} if enabled. */
public double getRAMBufferSizeMB() {
return ramBufferSizeMB;
}
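The guards in setRAMBufferSizeMB above enforce both the sub-2048 MB ceiling and the rule that at least one flush trigger stays enabled. A hedged configuration sketch (the buffer size is illustrative only):

    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31)
        .setRAMBufferSizeMB(48.0)  // flush roughly every 48 MB of buffered state
        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    // Disabling the RAM trigger too would throw IllegalArgumentException:
    // conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);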
/**
* Determines the minimal number of documents required before the buffered
* in-memory documents are flushed as a new Segment. Large values generally
* give faster indexing.
*
* <p>
* When this is set, the writer will flush every maxBufferedDocs added
* documents. Pass in {@link #DISABLE_AUTO_FLUSH} to prevent triggering a
* flush due to number of buffered documents. Note that if flushing by RAM
* usage is also enabled, then the flush will be triggered by whichever comes
* first.
*
* <p>
* Disabled by default (writer flushes by RAM usage).
*
* @see #setRAMBufferSizeMB(double)
*
* @throws IllegalArgumentException
* if maxBufferedDocs is enabled but smaller than 2, or it disables
* maxBufferedDocs when ramBufferSize is already disabled
*/
public IndexWriterConfig setMaxBufferedDocs(int maxBufferedDocs) {
if (maxBufferedDocs != DISABLE_AUTO_FLUSH && maxBufferedDocs < 2)
throw new IllegalArgumentException(
"maxBufferedDocs must at least be 2 when enabled");
if (maxBufferedDocs == DISABLE_AUTO_FLUSH
&& ramBufferSizeMB == DISABLE_AUTO_FLUSH)
throw new IllegalArgumentException(
"at least one of ramBufferSize and maxBufferedDocs must be enabled");
this.maxBufferedDocs = maxBufferedDocs;
return this;
}

/**
* Returns the number of buffered added documents that will trigger a flush if
* enabled.
*
* @see #setMaxBufferedDocs(int)
*/
public int getMaxBufferedDocs() {
return maxBufferedDocs;
}

/** Expert: sets the {@link DocConsumer} chain to be used to process documents. */
IndexWriterConfig setIndexingChain(IndexingChain indexingChain) {
this.indexingChain = indexingChain == null ? DocumentsWriter.defaultIndexingChain : indexingChain;
return this;
}

/** Returns the indexing chain set on {@link #setIndexingChain(IndexingChain)}. */
IndexingChain getIndexingChain() {
return indexingChain;
}

@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("matchVersion=").append(matchVersion).append("\n");
sb.append("analyzer=").append(analyzer.getClass().getName()).append("\n");
sb.append("delPolicy=").append(delPolicy.getClass().getName()).append("\n");
sb.append("commit=").append(commit == null ? "null" : commit.getClass().getName()).append("\n");
sb.append("openMode=").append(openMode).append("\n");
sb.append("maxFieldLength=").append(maxFieldLength).append("\n");
sb.append("similarity=").append(similarity.getClass().getName()).append("\n");
sb.append("termIndexInterval=").append(termIndexInterval).append("\n");
sb.append("mergeScheduler=").append(mergeScheduler.getClass().getName()).append("\n");
sb.append("default WRITE_LOCK_TIMEOUT=").append(WRITE_LOCK_TIMEOUT).append("\n");
sb.append("writeLockTimeout=").append(writeLockTimeout).append("\n");
sb.append("maxBufferedDeleteTerms=").append(maxBufferedDeleteTerms).append("\n");
sb.append("ramBufferSizeMB=").append(ramBufferSizeMB).append("\n");
sb.append("maxBufferedDocs=").append(maxBufferedDocs).append("\n");
return sb.toString();
}
}
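Taken together, the deleted class above amounts to a fluent builder. A hedged end-to-end sketch of how it was meant to be used before this revert (setting values are illustrative):

    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31)
        .setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_31))
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
        .setMaxBufferedDeleteTerms(1000);
    System.out.println(conf);  // toString() dumps one key=value pair per line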
@@ -48,7 +48,7 @@ final class SegmentMerger {

private Directory directory;
private String segment;
private int termIndexInterval = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;

private List<IndexReader> readers = new ArrayList<IndexReader>();
private FieldInfos fieldInfos;

@@ -96,7 +96,7 @@ final class SegmentMerger {
}
};
}
termIndexInterval = writer.getConfig().getTermIndexInterval();
termIndexInterval = writer.getTermIndexInterval();
}

boolean hasProx() {

@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;

@@ -50,9 +49,8 @@ public class TestDemo extends LuceneTestCase {
Directory directory = new RAMDirectory();
// To store an index on disk, use this instead:
//Directory directory = FSDirectory.open("/tmp/testindex");
IndexWriter iwriter = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer).setMaxFieldLength(25000));

IndexWriter iwriter = new IndexWriter(directory, analyzer, true,
new IndexWriter.MaxFieldLength(25000));
Document doc = new Document();
String text = "This is the text to be indexed.";
doc.add(new Field("fieldname", text, Field.Store.YES,

@@ -18,9 +18,9 @@ package org.apache.lucene;
*/
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.document.Document;

@@ -86,14 +86,15 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setMergeScheduler(new MyMergeScheduler())
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
MyMergeScheduler ms = new MyMergeScheduler();
writer.setMergeScheduler(ms);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
for(int i=0;i<20;i++)
writer.addDocument(doc);

((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
ms.sync();
writer.close();

assertTrue(mergeThreadCreated);

@@ -70,14 +70,14 @@ public class TestSearch extends LuceneTestCase {

private void doTestSearch(PrintWriter out, boolean useCompoundFile)
throws Exception {
throws Exception
{
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFile);
lmp.setUseCompoundDocStore(useCompoundFile);
IndexWriter writer = new IndexWriter(directory, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);

writer.setUseCompoundFile(useCompoundFile);

String[] docs = {
"a b c d e",

@@ -78,11 +78,10 @@ public class TestSearchForDuplicates extends LuceneTestCase {
private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(analyzer));
LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
lmp.setUseCompoundFile(useCompoundFiles);
lmp.setUseCompoundDocStore(useCompoundFiles);
IndexWriter writer = new IndexWriter(directory, analyzer, true,
IndexWriter.MaxFieldLength.LIMITED);

writer.setUseCompoundFile(useCompoundFiles);

final int MAX_DOCS = 225;

@@ -31,7 +31,6 @@ import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.TestIndexWriter;

@@ -68,10 +67,9 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
Directory dir = new MockRAMDirectory();

SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
// Force frequent flushes
writer.setMaxBufferedDocs(2);
Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<7;i++) {

@@ -85,9 +83,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
writer.close();
copyFiles(dir, cp);

writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
.setIndexDeletionPolicy(dp));
writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
copyFiles(dir, cp);
for(int i=0;i<7;i++) {
writer.addDocument(doc);

@@ -99,9 +95,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
writer.close();
copyFiles(dir, cp);
dp.release();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT))
.setIndexDeletionPolicy(dp));
writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
writer.close();
try {
copyFiles(dir, cp);

@@ -117,10 +111,10 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
final long stopTime = System.currentTimeMillis() + 1000;

SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(
new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);

// Force frequent flushes
writer.setMaxBufferedDocs(2);

final Thread t = new Thread() {
@Override
|
|||
import org.apache.lucene.document.Field.TermVector;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermPositions;
|
||||
import org.apache.lucene.store.Directory;
|
||||
|
@ -38,7 +37,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
|
|||
|
||||
public void testCaching() throws IOException {
|
||||
Directory dir = new RAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
|
||||
IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
TokenStream stream = new TokenStream() {
|
||||
private int index = 0;
|
||||
|
|
|
@ -24,7 +24,6 @@ import org.apache.lucene.document.Document;
|
|||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermDocs;
|
||||
import org.apache.lucene.queryParser.QueryParser;
|
||||
|
@ -42,9 +41,9 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
|
|||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
directory = new RAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
|
||||
TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
|
||||
TEST_VERSION_CURRENT)));
|
||||
IndexWriter writer = new IndexWriter(directory,
|
||||
new SimpleAnalyzer(TEST_VERSION_CURRENT),
|
||||
true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
|
||||
|
@ -71,7 +70,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
|
|||
|
||||
public void testMutipleDocument() throws Exception {
|
||||
RAMDirectory dir = new RAMDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new KeywordAnalyzer()));
|
||||
IndexWriter writer = new IndexWriter(dir,new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
|
||||
writer.addDocument(doc);
|
||||
|
|
|
@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.PerFieldAnalyzerWrapper;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.IndexSearcher;
@@ -70,7 +69,8 @@ public class CollationTestBase extends LuceneTestCase {
                                  String firstEnd, String secondBeg,
                                  String secondEnd) throws Exception {
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628",
                       Field.Store.YES, Field.Index.ANALYZED));
@@ -101,7 +101,8 @@ public class CollationTestBase extends LuceneTestCase {
                                           String firstEnd, String secondBeg,
                                           String secondEnd) throws Exception {
     RAMDirectory ramDir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
 
     // Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
@@ -124,11 +125,13 @@ public class CollationTestBase extends LuceneTestCase {
     searcher.close();
   }
 
-  public void testFarsiTermRangeQuery(Analyzer analyzer, String firstBeg,
-      String firstEnd, String secondBeg, String secondEnd) throws Exception {
+  public void testFarsiTermRangeQuery
+    (Analyzer analyzer, String firstBeg, String firstEnd,
+     String secondBeg, String secondEnd) throws Exception {
 
     RAMDirectory farsiIndex = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+      (farsiIndex, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("content", "\u0633\u0627\u0628",
                       Field.Store.YES, Field.Index.ANALYZED));
@@ -175,7 +178,8 @@ public class CollationTestBase extends LuceneTestCase {
     analyzer.addAnalyzer("France", franceAnalyzer);
     analyzer.addAnalyzer("Sweden", swedenAnalyzer);
     analyzer.addAnalyzer("Denmark", denmarkAnalyzer);
-    IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter
+        (indexStore, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     // document data:
     // the tracer field is used to determine which document was hit

@@ -2,9 +2,9 @@ package org.apache.lucene.document;
 
 import org.apache.lucene.util.LuceneTestCase;
 
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.MockRAMDirectory;
 
 /**
@@ -27,7 +27,8 @@ import org.apache.lucene.store.MockRAMDirectory;
 /**
  * Tests {@link Document} class.
  */
-public class TestBinaryDocument extends LuceneTestCase {
+public class TestBinaryDocument extends LuceneTestCase
+{
 
   String binaryValStored = "this text will be stored as a byte array in the index";
   String binaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
@@ -57,7 +58,7 @@ public class TestBinaryDocument extends LuceneTestCase {
 
     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
 
@@ -82,7 +83,9 @@ public class TestBinaryDocument extends LuceneTestCase {
     dir.close();
   }
 
-  public void testCompressionTools() throws Exception {
+  public void testCompressionTools()
+    throws Exception
+  {
     Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.compress(binaryValCompressed.getBytes()));
     Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.compressString(binaryValCompressed));
 
@@ -93,7 +96,7 @@ public class TestBinaryDocument extends LuceneTestCase {
 
     /** add the doc to a ram index */
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
 

@@ -2,7 +2,6 @@ package org.apache.lucene.document;
 
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -152,11 +151,10 @@ public class TestDocument extends LuceneTestCase
    *
    * @throws Exception on error
    */
-  public void testGetValuesForIndexedDocument() throws Exception {
+  public void testGetValuesForIndexedDocument() throws Exception
+  {
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(makeDocumentWithFields());
     writer.close();
 
@@ -227,9 +225,7 @@ public class TestDocument extends LuceneTestCase
     doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
 
     RAMDirectory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     field.setValue("id2");
     writer.addDocument(doc);

@@ -232,9 +232,10 @@ class DocHelper {
    * @param doc
    * @throws IOException
    */
-  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(analyzer).setSimilarity(similarity));
+  public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException
+  {
+    IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setSimilarity(similarity);
     //writer.setUseCompoundFile(false);
     writer.addDocument(doc);
     writer.commit();

@@ -20,9 +20,9 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.MockRAMDirectory;
@@ -39,28 +39,27 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
    // add 40 documents in separate files
    addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux2, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+    writer = newWriter(aux2, true);
     // add 40 documents in compound files
     addDocs2(writer, 50);
     assertEquals(50, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(100, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
     assertEquals(190, writer.maxDoc());
@@ -74,14 +73,14 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     // now add another set in.
     Directory aux3 = new RAMDirectory();
-    writer = newWriter(aux3, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(aux3, true);
     // add 40 documents
     addDocs(writer, 40);
     assertEquals(40, writer.maxDoc());
     writer.close();
 
     // test doc count before segments are merged/index is optimized
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(190, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux3 });
     assertEquals(230, writer.maxDoc());
@@ -95,7 +94,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     verifyTermDocs(dir, new Term("content", "bbb"), 50);
 
     // now optimize it.
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     writer.optimize();
     writer.close();
 
@@ -108,11 +107,11 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     // now add a single document
     Directory aux4 = new RAMDirectory();
-    writer = newWriter(aux4, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(aux4, true);
     addDocs2(writer, 1);
     writer.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     assertEquals(230, writer.maxDoc());
     writer.addIndexesNoOptimize(new Directory[] { aux4 });
     assertEquals(231, writer.maxDoc());
@@ -130,7 +129,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
     writer.addIndexesNoOptimize(new Directory[] {aux});
 
     // Adds 10 docs, then replaces them with another 10
@@ -167,7 +166,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -206,7 +205,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     Directory aux = new RAMDirectory();
 
     setUpDirs(dir, aux);
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = newWriter(dir, false);
 
     // Adds 10 docs, then replaces them with another 10
     // docs, so 10 pending deletes:
@@ -247,25 +246,25 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    writer = newWriter(dir, true);
     // add 100 documents
     addDocs(writer, 100);
     assertEquals(100, writer.maxDoc());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(1000);
     // add 140 documents in separate files
     addDocs(writer, 40);
     writer.close();
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(1000);
     addDocs(writer, 100);
     writer.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = newWriter(dir, false);
     try {
       // cannot add self
       writer.addIndexesNoOptimize(new Directory[] { aux, dir });
@@ -291,10 +290,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
-        10));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(10);
+    writer.setMergeFactor(4);
     addDocs(writer, 10);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -316,8 +314,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(9);
+    writer.setMergeFactor(4);
     addDocs(writer, 2);
 
     writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -339,10 +338,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(
-        10));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(10);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
     assertEquals(1060, writer.maxDoc());
@@ -369,10 +367,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     assertEquals(10, reader.numDocs());
     reader.close();
 
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-        .setMaxBufferedDocs(4));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    IndexWriter writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(4);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
     assertEquals(1020, writer.maxDoc());
@@ -393,10 +390,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
 
     setUpDirs(dir, aux);
 
-    IndexWriter writer = newWriter(aux2, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(
-        100));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+    IndexWriter writer = newWriter(aux2, true);
+    writer.setMaxBufferedDocs(100);
+    writer.setMergeFactor(10);
     writer.addIndexesNoOptimize(new Directory[] { aux });
     assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
@@ -416,9 +412,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     assertEquals(22, reader.numDocs());
     reader.close();
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(6));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
+    writer = newWriter(dir, false);
+    writer.setMaxBufferedDocs(6);
+    writer.setMergeFactor(4);
 
     writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
     assertEquals(1025, writer.maxDoc());
@@ -429,9 +425,9 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     verifyNumDocs(dir, 1025);
   }
 
-  private IndexWriter newWriter(Directory dir, IndexWriterConfig conf)
+  private IndexWriter newWriter(Directory dir, boolean create)
       throws IOException {
-    final IndexWriter writer = new IndexWriter(dir, conf);
+    final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.setMergePolicy(new LogDocMergePolicy(writer));
     return writer;
   }
@@ -475,25 +471,26 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   private void setUpDirs(Directory dir, Directory aux) throws IOException {
     IndexWriter writer = null;
 
-    writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+    writer = newWriter(dir, true);
+    writer.setMaxBufferedDocs(1000);
     // add 1000 documents in 1 segment
     addDocs(writer, 1000);
     assertEquals(1000, writer.maxDoc());
     assertEquals(1, writer.getSegmentCount());
     writer.close();
 
-    writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+    writer = newWriter(aux, true);
+    writer.setUseCompoundFile(false); // use one without a compound file
+    writer.setMaxBufferedDocs(100);
+    writer.setMergeFactor(10);
     // add 30 documents in 3 segments
     for (int i = 0; i < 3; i++) {
       addDocs(writer, 10);
       writer.close();
-      writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
-      ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
-      ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
+      writer = newWriter(aux, false);
+      writer.setUseCompoundFile(false); // use one without a compound file
+      writer.setMaxBufferedDocs(100);
+      writer.setMergeFactor(10);
     }
     assertEquals(30, writer.maxDoc());
     assertEquals(3, writer.getSegmentCount());
@@ -504,19 +501,18 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   public void testHangOnClose() throws IOException {
 
     Directory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(5));
-    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
-    lmp.setUseCompoundFile(false);
-    lmp.setUseCompoundDocStore(false);
-    lmp.setMergeFactor(100);
-    writer.setMergePolicy(lmp);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
+    writer.setMaxBufferedDocs(5);
+    writer.setUseCompoundFile(false);
+    writer.setMergeFactor(100);
 
     Document doc = new Document();
     doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                       Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<60;i++)
       writer.addDocument(doc);
 
     writer.setMaxBufferedDocs(200);
     Document doc2 = new Document();
     doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
                       Field.Index.NO));
@@ -531,13 +527,13 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     writer.close();
 
     Directory dir2 = new MockRAMDirectory();
-    writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT).setMergeScheduler(new SerialMergeScheduler()));
-    lmp = new LogByteSizeMergePolicy(writer);
+    writer = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
     lmp.setMinMergeMB(0.0001);
-    lmp.setUseCompoundFile(false);
-    lmp.setUseCompoundDocStore(false);
-    lmp.setMergeFactor(4);
     writer.setMergePolicy(lmp);
+    writer.setMergeFactor(4);
+    writer.setUseCompoundFile(false);
+    writer.setMergeScheduler(new SerialMergeScheduler());
     writer.addIndexesNoOptimize(new Directory[] {dir});
     writer.close();
     dir.close();
@@ -548,16 +544,14 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   // is respected when copying tail segments
   public void testTargetCFS() throws IOException {
     Directory dir = new RAMDirectory();
-    IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
+    IndexWriter writer = newWriter(dir, true);
+    writer.setUseCompoundFile(false);
     addDocs(writer, 1);
     writer.close();
 
     Directory other = new RAMDirectory();
-    writer = newWriter(other, new IndexWriterConfig(TEST_VERSION_CURRENT));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true);
+    writer = newWriter(other, true);
+    writer.setUseCompoundFile(true);
     writer.addIndexesNoOptimize(new Directory[] {dir});
     assertTrue(writer.newestSegment().getUseCompoundFile());
     writer.close();

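A second pattern runs through the TestAddIndexesNoOptimize hunks above: merge tuning moves off the MergePolicy and back onto IndexWriter's convenience setters. Below is a minimal sketch of that equivalence, under the same March-2010 trunk-API assumption; the class name and directory are placeholders, not part of the commit.

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class MergeSettingsSketch {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(),
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
            IndexWriter.MaxFieldLength.UNLIMITED);

        // Idiom on the config side of the diff: cast out the default
        // LogMergePolicy and configure it directly.
        ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
        ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);

        // Idiom on the reverted side: equivalent setters on the writer,
        // which delegate to the same underlying LogMergePolicy.
        writer.setMergeFactor(4);
        writer.setUseCompoundFile(false);

        writer.close();
      }
    }
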
@@ -19,19 +19,20 @@ package org.apache.lucene.index;
 import org.apache.lucene.util.*;
 import org.apache.lucene.store.*;
 import org.apache.lucene.document.*;
 import org.apache.lucene.analysis.*;
 
 import java.util.Random;
 import java.io.File;
 import java.io.IOException;
 
 public class TestAtomicUpdate extends LuceneTestCase {
-
-  private static final class MockIndexWriter extends IndexWriter {
-    static Random RANDOM;
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+  private Random RANDOM;
+
+  public class MockIndexWriter extends IndexWriter {
 
-    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
-      super(dir, conf);
+    public MockIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl) throws IOException {
+      super(dir, a, create, mfl);
     }
 
     @Override
@@ -125,8 +126,9 @@ public class TestAtomicUpdate extends LuceneTestCase {
 
     TimedThread[] threads = new TimedThread[4];
 
-    IndexWriter writer = new MockIndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(7));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
+    IndexWriter writer = new MockIndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setMaxBufferedDocs(7);
+    writer.setMergeFactor(3);
 
     // Establish a base index of 100 docs:
     for(int i=0;i<100;i++) {
@@ -181,7 +183,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
     FSDirectory.
   */
   public void testAtomicUpdates() throws Exception {
-    MockIndexWriter.RANDOM = newRandom();
+    RANDOM = newRandom();
     Directory directory;
 
     // First in a RAM directory:

@@ -32,12 +32,12 @@ import java.util.ArrayList;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Fieldable;
 import org.apache.lucene.document.FieldSelector;
 import org.apache.lucene.document.FieldSelectorResult;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
@@ -52,7 +52,8 @@ import org.apache.lucene.util._TestUtil;
     against it, and add documents to it.
 */
 
-public class TestBackwardsCompatibility extends LuceneTestCase {
+public class TestBackwardsCompatibility extends LuceneTestCase
+{
 
   // Uncomment these cases & run them on an older Lucene
   // version, to generate an index to test backwards
@@ -214,7 +215,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       hasTested29++;
     }
 
-    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
     w.optimize();
     w.close();
 
@@ -354,7 +355,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     Directory dir = FSDirectory.open(new File(dirName));
 
     // open writer
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
 
     // add 10 docs
     for(int i=0;i<10;i++) {
@@ -398,7 +399,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     searcher.close();
 
     // optimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -448,7 +449,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     searcher.close();
 
     // optimize
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
     writer.optimize();
     writer.close();
 
@@ -470,9 +471,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     dirName = fullDir(dirName);
 
     Directory dir = FSDirectory.open(new File(dirName));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(doCFS);
+    writer.setMaxBufferedDocs(10);
 
     for(int i=0;i<35;i++) {
       addDoc(writer, i);
@@ -481,9 +482,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     writer.close();
 
     // open fresh writer so we get no prx file in the added segment
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
-    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+    writer.setUseCompoundFile(doCFS);
+    writer.setMaxBufferedDocs(10);
     addNoProxDoc(writer);
     writer.close();
 
@@ -508,7 +509,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     try {
       Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
 
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setRAMBufferSizeMB(16.0);
       for(int i=0;i<35;i++) {
        addDoc(writer, i);
       }

@@ -25,6 +25,7 @@ import java.util.ArrayList;
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.util.Constants;
@@ -33,7 +34,9 @@ public class TestCheckIndex extends LuceneTestCase {
 
   public void testDeletedDocs() throws IOException {
     MockRAMDirectory dir = new MockRAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
+                                         IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMaxBufferedDocs(2);
     Document doc = new Document();
     doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
     for(int i=0;i<19;i++) {

@@ -17,17 +17,20 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
+import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 
 import org.apache.lucene.util.LuceneTestCase;
 import java.io.IOException;
 
 public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
+  private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
+
   private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
     boolean doFail;
     boolean hitExc;
@@ -65,7 +68,10 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
     FailOnlyOnFlush failure = new FailOnlyOnFlush();
     directory.failOn(failure);
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+    writer.setMergeScheduler(cms);
+    writer.setMaxBufferedDocs(2);
     Document doc = new Document();
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
@@ -109,7 +115,9 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
     RAMDirectory directory = new MockRAMDirectory();
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+    ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+    writer.setMergeScheduler(cms);
 
     LogDocMergePolicy mp = new LogDocMergePolicy(writer);
     writer.setMergePolicy(mp);
@@ -149,10 +157,12 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
     RAMDirectory directory = new MockRAMDirectory();
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 
     for(int iter=0;iter<7;iter++) {
+      ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+      writer.setMergeScheduler(cms);
+      writer.setMaxBufferedDocs(2);
 
       for(int j=0;j<21;j++) {
         Document doc = new Document();
@@ -164,9 +174,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
       TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
 
       // Reopen
-      writer = new IndexWriter(directory, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-          .setMaxBufferedDocs(2));
+      writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
     }
 
     writer.close();
@@ -181,10 +189,13 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
     Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
     doc.add(idField);
 
-    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
-    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
+    IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
 
     for(int iter=0;iter<10;iter++) {
+      ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
+      writer.setMergeScheduler(cms);
+      writer.setMaxBufferedDocs(2);
+      writer.setMergeFactor(100);
 
       for(int j=0;j<201;j++) {
         idField.setValue(Integer.toString(iter*201+j));
@@ -199,7 +210,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
 
       // Force a bunch of merge threads to kick off so we
       // stress out aborting them on close:
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
+      writer.setMergeFactor(3);
       writer.addDocument(doc);
       writer.commit();
 
@@ -210,8 +221,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
       reader.close();
 
      // Reopen
-      writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
-      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
+      writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
     }
     writer.close();
 

@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import java.io.IOException;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.NoLockFactory;
 import org.apache.lucene.document.Document;
@@ -34,8 +35,10 @@ public class TestCrash extends LuceneTestCase {
   private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
     dir.setLockFactory(NoLockFactory.getNoLockFactory());
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
-    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+    //writer.setMaxBufferedDocs(2);
+    writer.setMaxBufferedDocs(10);
+    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
 
     Document doc = new Document();
     doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
@@ -48,7 +51,7 @@ public class TestCrash extends LuceneTestCase {
 
   private void crash(final IndexWriter writer) throws IOException {
     final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
-    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
+    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getMergeScheduler();
     dir.crash();
     cms.sync();
     dir.clearCrash();

@@ -23,9 +23,9 @@ import java.util.List;
 import java.util.Set;
 import java.util.Collection;
 
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
@@ -40,8 +40,8 @@ import org.apache.lucene.util.LuceneTestCase;
     against it, and add documents to it.
 */
 
-public class TestDeletionPolicy extends LuceneTestCase {
-
+public class TestDeletionPolicy extends LuceneTestCase
+{
   private void verifyCommitOrder(List<? extends IndexCommit> commits) throws IOException {
     final IndexCommit firstCommit = commits.get(0);
     long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName());
@@ -201,10 +201,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
 
     Directory dir = new RAMDirectory();
     ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.close();
 
     long lastDeleteTime = 0;
@@ -212,11 +210,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
       // Record last time when writer performed deletes of
      // past commits
       lastDeleteTime = System.currentTimeMillis();
-      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-          .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
        addDoc(writer);
       }
@@ -276,22 +271,17 @@ public class TestDeletionPolicy extends LuceneTestCase {
     Directory dir = new RAMDirectory();
     policy.dir = dir;
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
-        .setMaxBufferedDocs(10).setMergeScheduler(new SerialMergeScheduler()));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setMaxBufferedDocs(10);
+    writer.setUseCompoundFile(useCompoundFile);
+    writer.setMergeScheduler(new SerialMergeScheduler());
     for(int i=0;i<107;i++) {
       addDoc(writer);
     }
     writer.close();
 
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-    lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
 
@@ -328,9 +318,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     // Open & close a writer and assert that it
     // actually removed something:
     int preCount = dir.listAll().length;
-    writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-        .setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.close();
     int postCount = dir.listAll().length;
     assertTrue(postCount < preCount);
@@ -352,9 +340,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
     Directory dir = new MockRAMDirectory();
     policy.dir = dir;
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy)
-        .setMaxBufferedDocs(2));
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMaxBufferedDocs(2);
     for(int i=0;i<10;i++) {
       addDoc(writer);
       if ((1+i)%2 == 0)
@@ -372,7 +359,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     assertTrue(lastCommit != null);
 
     // Now add 1 doc and optimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     addDoc(writer);
     assertEquals(11, writer.numDocs());
     writer.optimize();
@@ -381,8 +368,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     assertEquals(7, IndexReader.listCommits(dir).size());
 
     // Now open writer on the commit just before optimize:
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
 
     // Should undo our rollback:
@@ -394,8 +380,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     assertEquals(11, r.numDocs());
     r.close();
 
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
     // Commits the rollback:
     writer.close();
@@ -411,7 +396,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
     r.close();
 
     // Reoptimize
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexDeletionPolicy(policy));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
     writer.optimize();
     writer.close();
 
@@ -422,7 +407,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
 
     // Now open writer on the commit just before optimize,
     // but this time keeping only the last commit:
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setIndexCommit(lastCommit));
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
     assertEquals(10, writer.numDocs());
 
     // Reader still sees optimized index, because writer
@@ -458,22 +443,16 @@ public class TestDeletionPolicy extends LuceneTestCase {
 
     Directory dir = new RAMDirectory();
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-        .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setMaxBufferedDocs(10);
+    writer.setUseCompoundFile(useCompoundFile);
     for(int i=0;i<107;i++) {
       addDoc(writer);
     }
     writer.close();
 
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-    lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     writer.close();
 
@@ -507,12 +486,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     for(int j=0;j<N+1;j++) {
-      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-          .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMaxBufferedDocs(10);
+      writer.setUseCompoundFile(useCompoundFile);
      for(int i=0;i<17;i++) {
        addDoc(writer);
      }
@@ -565,23 +541,15 @@ public class TestDeletionPolicy extends LuceneTestCase {
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-        .setIndexDeletionPolicy(policy));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.close();
     Term searchTerm = new Term("content", "aaa");
     Query query = new TermQuery(searchTerm);
 
     for(int i=0;i<N+1;i++) {
-      writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-          .setIndexDeletionPolicy(policy));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
        addDoc(writer);
       }
@@ -597,11 +565,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
      reader.close();
      searcher.close();
     }
-    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
-        .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
-    lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.optimize();
     // this is a commit
     writer.close();
@@ -671,24 +636,18 @@ public class TestDeletionPolicy extends LuceneTestCase {
     KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
 
     Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-        .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
-    lmp.setUseCompoundFile(useCompoundFile);
-    lmp.setUseCompoundDocStore(useCompoundFile);
+    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+    writer.setMaxBufferedDocs(10);
+    writer.setUseCompoundFile(useCompoundFile);
     writer.close();
     Term searchTerm = new Term("content", "aaa");
     Query query = new TermQuery(searchTerm);
 
     for(int i=0;i<N+1;i++) {
 
-      writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND)
-          .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10));
-      lmp = (LogMergePolicy) writer.getMergePolicy();
-      lmp.setUseCompoundFile(useCompoundFile);
-      lmp.setUseCompoundDocStore(useCompoundFile);
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
+      writer.setMaxBufferedDocs(10);
+      writer.setUseCompoundFile(useCompoundFile);
       for(int j=0;j<17;j++) {
        addDoc(writer);
       }
@@ -704,9 +663,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
      reader.close();
      searcher.close();
 
-      writer = new IndexWriter(dir, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE)
-          .setIndexDeletionPolicy(policy));
+      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
      // This will not commit: there are no changes
      // pending because we opened for "create":
      writer.close();

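The TestDeletionPolicy hunks above exercise the remaining constructor family: the restored API takes an IndexDeletionPolicy (and, in two hunks, an IndexCommit) positionally, where the config style chained setIndexDeletionPolicy() and setIndexCommit(). A compact restatement under the same trunk-API assumptions follows; KeepOnlyLastCommitDeletionPolicy is used only because it needs no test scaffolding, and the class name is hypothetical.

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class DeletionPolicySketch {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        // Restored style: the deletion policy rides in the constructor.
        IndexWriter writer = new IndexWriter(dir,
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT), true,
            new KeepOnlyLastCommitDeletionPolicy(),
            IndexWriter.MaxFieldLength.UNLIMITED);
        writer.close();
        // The removed config style would instead have been written as
        //   new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT)
        //       .setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()));
      }
    }
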
@@ -22,7 +22,6 @@ import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 
@@ -188,10 +187,7 @@ public class TestDirectoryReader extends LuceneTestCase {
   }
 
   private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
-    IndexWriter iw = new IndexWriter(ramDir1, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(
-        new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
-        create ? OpenMode.CREATE : OpenMode.APPEND));
+    IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
     doc.add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
     iw.addDocument(doc);

@@ -29,9 +29,9 @@ import java.util.List;
 import junit.framework.TestSuite;
 import junit.textui.TestRunner;
 
+import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -109,7 +109,7 @@ public class TestDoc extends LuceneTestCase {
       PrintWriter out = new PrintWriter(sw, true);
 
       Directory directory = FSDirectory.open(indexDir);
-      IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+      IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       SegmentInfo si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);
@@ -137,8 +137,7 @@ public class TestDoc extends LuceneTestCase {
       out = new PrintWriter(sw, true);
 
       directory = FSDirectory.open(indexDir);
-      writer = new IndexWriter(directory, new IndexWriterConfig(
-          TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
+      writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
 
       si1 = indexDoc(writer, "test.txt");
       printSegment(out, si1);

@@ -24,6 +24,7 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.SimpleAnalyzer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.analysis.WhitespaceTokenizer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
@@ -60,7 +61,8 @@ public class TestDocumentWriter extends LuceneTestCase {
   public void testAddDocument() throws Exception {
     Document testDoc = new Document();
     DocHelper.setupDoc(testDoc);
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
+    Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(testDoc);
     writer.commit();
     SegmentInfo info = writer.newestSegment();
@@ -117,7 +119,7 @@ public class TestDocumentWriter extends LuceneTestCase {
      }
    };
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
     doc.add(new Field("repeated", "repeated one", Field.Store.YES, Field.Index.ANALYZED));
@@ -180,7 +182,7 @@ public class TestDocumentWriter extends LuceneTestCase {
      }
    };
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(analyzer));
+    IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
 
     Document doc = new Document();
     doc.add(new Field("f1", "a 5 a a", Field.Store.YES, Field.Index.ANALYZED));
@@ -205,9 +207,7 @@ public class TestDocumentWriter extends LuceneTestCase {
 
 
   public void testPreAnalyzedField() throws IOException {
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new SimpleAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     Document doc = new Document();
 
     doc.add(new Field("preanalyzed", new TokenStream() {
@@ -266,9 +266,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
     doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.close();
 
@@ -301,9 +299,7 @@ public class TestDocumentWriter extends LuceneTestCase {
     doc.add(f);
     doc.add(new Field("f2", "v2", Store.YES, Index.NO));
 
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(
-        TEST_VERSION_CURRENT)));
+    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
     writer.addDocument(doc);
     writer.optimize(); // be sure to have a single segment
     writer.close();

@ -17,31 +17,22 @@ package org.apache.lucene.index;
* limitations under the License.
*/

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.LoadFirstFieldSelector;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util._TestUtil;

import java.io.File;
import java.io.IOException;
import java.util.*;

public class TestFieldsReader extends LuceneTestCase {
private RAMDirectory dir = new RAMDirectory();
private Document testDoc = new Document();

@ -59,9 +50,8 @@ public class TestFieldsReader extends LuceneTestCase {
fieldInfos = new FieldInfos();
DocHelper.setupDoc(testDoc);
fieldInfos.add(testDoc);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
writer.addDocument(testDoc);
writer.close();
}

@ -217,8 +207,8 @@ public class TestFieldsReader extends LuceneTestCase {
FSDirectory tmpDir = FSDirectory.open(file);
assertTrue(tmpDir != null);

IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
IndexWriter writer = new IndexWriter(tmpDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
writer.addDocument(testDoc);
writer.close();

@ -397,8 +387,7 @@ public class TestFieldsReader extends LuceneTestCase {

try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
writer.optimize();

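The TestFieldsReader hunks above also show the second axis of the revert: compound-file flags move back from the LogMergePolicy onto the writer itself. A minimal sketch of both styles, assuming the default merge policy is a LogMergePolicy as the casts above imply (the class name CompoundFileToggle is illustrative):

    // A minimal sketch, not part of the commit: compound-file toggling.
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class CompoundFileToggle {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(),
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            true, IndexWriter.MaxFieldLength.LIMITED);

        // Trunk style (removed): configure the merge policy directly.
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        lmp.setUseCompoundFile(false);
        lmp.setUseCompoundDocStore(false);

        // Restored style: the writer-level convenience setter.
        writer.setUseCompoundFile(false);

        writer.close();
      }
    }
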
@ -24,6 +24,7 @@ import junit.textui.TestRunner;

import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

@ -96,7 +97,8 @@ public class TestFilterIndexReader extends LuceneTestCase {
*/
public void testFilterIndexReader() throws Exception {
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);

Document d1 = new Document();
d1.add(new Field("default","one two", Field.Store.YES, Field.Index.ANALYZED));

@ -18,14 +18,13 @@ package org.apache.lucene.index;
*/

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;

import java.io.*;
import java.util.*;

@ -34,19 +33,19 @@ import java.util.*;
against it, and add documents to it.
*/

public class TestIndexFileDeleter extends LuceneTestCase {

public class TestIndexFileDeleter extends LuceneTestCase
{
public void testDeleteLeftoverFiles() throws IOException {

Directory dir = new RAMDirectory();

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
int i;
for(i=0;i<35;i++) {
addDoc(writer, i);
}
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
writer.setUseCompoundFile(false);
for(;i<45;i++) {
addDoc(writer, i);
}

@ -145,7 +144,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {

// Open & close a writer: it should delete the above 4
// files and nothing more:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.close();

String[] files2 = dir.listAll();

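The writer re-open in the last hunk makes the OpenMode mapping explicit: OpenMode.CREATE corresponds to create == true and OpenMode.APPEND to create == false in the restored constructors. A minimal sketch of that mapping (the class name OpenModeMapping is illustrative):

    // A minimal sketch, not part of the commit: OpenMode vs. the create flag.
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class OpenModeMapping {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();

        // OpenMode.CREATE <-> create == true: wipe any existing index.
        IndexWriter create = new IndexWriter(dir,
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            true, IndexWriter.MaxFieldLength.LIMITED);
        create.close();

        // OpenMode.APPEND <-> create == false: open the existing index.
        IndexWriter append = new IndexWriter(dir,
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            false, IndexWriter.MaxFieldLength.LIMITED);
        append.close();

        // OpenMode.CREATE_OR_APPEND has no boolean equivalent; the old API
        // expressed it with a constructor that omits the create flag.
      }
    }
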
@ -34,13 +34,14 @@ import java.util.SortedSet;
import junit.framework.TestSuite;
import junit.textui.TestRunner;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.SetBasedFieldSelector;
import org.apache.lucene.index.IndexReader.FieldOption;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;

@ -77,7 +78,8 @@ public class TestIndexReader extends LuceneTestCase
commitUserData.put("foo", "fighters");

// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.close();

@ -98,8 +100,8 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));

// Change the index
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.close();

@ -109,7 +111,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r3.close();

writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();

@ -120,21 +122,22 @@ public class TestIndexReader extends LuceneTestCase
d.close();
}

public void testIsCurrent() throws Exception {
public void testIsCurrent() throws Exception
{
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
// set up reader:
IndexReader reader = IndexReader.open(d, false);
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());

@ -146,10 +149,11 @@ public class TestIndexReader extends LuceneTestCase
* Tests the IndexReader.getFieldNames implementation
* @throws Exception on error
*/
public void testGetFieldNames() throws Exception {
public void testGetFieldNames() throws Exception
{
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
// set up reader

@ -161,18 +165,20 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(fieldNames.contains("unstored"));
reader.close();
// add more documents
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
for (int i = 0; i < 5*mergeFactor; i++) {
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
addDocumentWithFields(writer);
}
// new fields are in some different segments (we hope)
for (int i = 0; i < 5*mergeFactor; i++) {
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
addDocumentWithDifferentFields(writer);
}
// new termvector fields
for (int i = 0; i < 5*mergeFactor; i++) {
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
addDocumentWithTermVectorFields(writer);
}

@ -239,11 +245,10 @@ public class TestIndexReader extends LuceneTestCase
public void testTermVectors() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
// new termvector fields
int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
for (int i = 0; i < 5 * mergeFactor; i++) {
for (int i = 0; i < 5 * writer.getMergeFactor(); i++) {
Document doc = new Document();
doc.add(new Field("tvnot","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
doc.add(new Field("termvector","one two two three three three", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));

@ -266,6 +271,10 @@ public class TestIndexReader extends LuceneTestCase
assertTrue("entry is null and it shouldn't be", entry != null);
System.out.println("Entry: " + entry);
}

}

@ -293,7 +302,10 @@ public class TestIndexReader extends LuceneTestCase

}

public void testBasicDelete() throws IOException {

public void testBasicDelete() throws IOException
{
Directory dir = new MockRAMDirectory();

IndexWriter writer = null;

@ -301,8 +313,9 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 100 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 100; i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm.text());
}
writer.close();

@ -337,11 +350,12 @@ public class TestIndexReader extends LuceneTestCase
dir.close();
}

public void testBinaryFields() throws IOException {
public void testBinaryFields() throws IOException
{
Directory dir = new RAMDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));

@ -350,7 +364,7 @@ public class TestIndexReader extends LuceneTestCase
addDocumentWithTermVectorFields(writer);
}
writer.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("bin1", bin));
doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));

@ -387,7 +401,7 @@ public class TestIndexReader extends LuceneTestCase
// force optimize

writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
reader = IndexReader.open(dir, false);

@ -407,7 +421,8 @@ public class TestIndexReader extends LuceneTestCase

// Make sure attempts to make changes after reader is
// closed throws IOException:
public void testChangesAfterClose() throws IOException {
public void testChangesAfterClose() throws IOException
{
Directory dir = new RAMDirectory();

IndexWriter writer = null;

@ -415,8 +430,9 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 11 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 11; i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
addDoc(writer, searchTerm.text());
}
writer.close();

@ -450,7 +466,8 @@ public class TestIndexReader extends LuceneTestCase
}

// Make sure we get lock obtain failed exception with 2 writers:
public void testLockObtainFailed() throws IOException {
public void testLockObtainFailed() throws IOException
{
Directory dir = new RAMDirectory();

IndexWriter writer = null;

@ -458,8 +475,9 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 11 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
for (int i = 0; i < 11; i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 11; i++)
{
addDoc(writer, searchTerm.text());
}

@ -503,7 +521,7 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm = new Term("content", "aaa");

// add 1 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, searchTerm.text());
writer.close();

@ -540,16 +558,16 @@ public class TestIndexReader extends LuceneTestCase

// Make sure you can set norms & commit, and there are
// no extra norms files left:
public void testWritingNormsNoReader() throws IOException {
public void testWritingNormsNoReader() throws IOException
{
Directory dir = new MockRAMDirectory();
IndexWriter writer = null;
IndexReader reader = null;
Term searchTerm = new Term("content", "aaa");

// add 1 documents with term : aaa
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setUseCompoundFile(false);
addDoc(writer, searchTerm.text());
writer.close();

@ -593,7 +611,8 @@ public class TestIndexReader extends LuceneTestCase
deleteReaderWriterConflict(true);
}

private void deleteReaderWriterConflict(boolean optimize) throws IOException {
private void deleteReaderWriterConflict(boolean optimize) throws IOException
{
//Directory dir = new RAMDirectory();
Directory dir = getDirectory();

@ -601,8 +620,9 @@ public class TestIndexReader extends LuceneTestCase
Term searchTerm2 = new Term("content", "bbb");

// add 100 documents with term : aaa
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm.text());
}
writer.close();

@ -616,8 +636,9 @@ public class TestIndexReader extends LuceneTestCase
assertTermDocsCount("first reader", reader, searchTerm2, 0);

// add 100 documents with term : bbb
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
for (int i = 0; i < 100; i++) {
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm2.text());
}

@ -677,11 +698,12 @@ public class TestIndexReader extends LuceneTestCase
return FSDirectory.open(new File(System.getProperty("tempDir"), "testIndex"));
}

public void testFilesOpenClose() throws IOException {
public void testFilesOpenClose() throws IOException
{
// Create initial data set
File dirFile = new File(System.getProperty("tempDir"), "testIndex");
Directory dir = getDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "test");
writer.close();
dir.close();

@ -691,7 +713,7 @@ public class TestIndexReader extends LuceneTestCase
dir = getDirectory();

// Now create the data set again, just as before
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "test");
writer.close();
dir.close();

@ -717,7 +739,7 @@ public class TestIndexReader extends LuceneTestCase
else
dir = getDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();

@ -734,7 +756,7 @@ public class TestIndexReader extends LuceneTestCase
// incremented:
Thread.sleep(1000);

writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);

@ -751,7 +773,7 @@ public class TestIndexReader extends LuceneTestCase
public void testVersion() throws IOException {
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();

@ -762,7 +784,7 @@ public class TestIndexReader extends LuceneTestCase
reader.close();
// modify index and check version has been
// incremented:
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);

@ -773,10 +795,10 @@ public class TestIndexReader extends LuceneTestCase

public void testLock() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
IndexReader reader = IndexReader.open(dir, false);
try {
reader.deleteDocument(0);

@ -793,7 +815,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();

@ -810,7 +832,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAllAfterClose() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();

@ -827,7 +849,7 @@ public class TestIndexReader extends LuceneTestCase

public void testUndeleteAllAfterCloseThenReopen() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
writer.close();

@ -865,7 +887,7 @@ public class TestIndexReader extends LuceneTestCase

// First build up a starting index:
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<157;i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));

@ -1055,7 +1077,7 @@ public class TestIndexReader extends LuceneTestCase

public void testDocsOutOfOrderJIRA140() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for(int i=0;i<11;i++) {
addDoc(writer, "aaa");
}

@ -1073,7 +1095,7 @@ public class TestIndexReader extends LuceneTestCase
}
reader.close();

writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);

// We must add more docs to get a new segment written
for(int i=0;i<11;i++) {

@ -1095,7 +1117,7 @@ public class TestIndexReader extends LuceneTestCase
public void testExceptionReleaseWriteLockJIRA768() throws IOException {

Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDoc(writer, "aaa");
writer.close();

@ -1160,7 +1182,8 @@ public class TestIndexReader extends LuceneTestCase
dir.close();
}

private void deleteReaderReaderConflict(boolean optimize) throws IOException {
private void deleteReaderReaderConflict(boolean optimize) throws IOException
{
Directory dir = getDirectory();

Term searchTerm1 = new Term("content", "aaa");

@ -1170,8 +1193,9 @@ public class TestIndexReader extends LuceneTestCase
// add 100 documents with term : aaa
// add 100 documents with term : bbb
// add 100 documents with term : ccc
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE));
for (int i = 0; i < 100; i++) {
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < 100; i++)
{
addDoc(writer, searchTerm1.text());
addDoc(writer, searchTerm2.text());
addDoc(writer, searchTerm3.text());

@ -1393,7 +1417,8 @@ public class TestIndexReader extends LuceneTestCase
RAMDirectory d = new MockRAMDirectory();

// set up writer
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.close();

@ -1408,8 +1433,8 @@ public class TestIndexReader extends LuceneTestCase
assertTrue(c.equals(r.getIndexCommit()));

// Change the index
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.close();

@ -1419,7 +1444,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(r2.getIndexCommit().isOptimized());
r2.close();

writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();

@ -1433,7 +1458,7 @@ public class TestIndexReader extends LuceneTestCase

public void testReadOnly() throws Throwable {
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.commit();
addDocumentWithFields(writer);

@ -1447,7 +1472,7 @@ public class TestIndexReader extends LuceneTestCase
// expected
}

writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();

@ -1464,7 +1489,7 @@ public class TestIndexReader extends LuceneTestCase
// expected
}

writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();

@ -1482,7 +1507,7 @@ public class TestIndexReader extends LuceneTestCase
}

// Make sure write lock isn't held
writer = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.close();

r3.close();

@ -1492,7 +1517,8 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1474
public void testIndexReader() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
writer.addDocument(createDocument("c"));

@ -1509,7 +1535,8 @@ public class TestIndexReader extends LuceneTestCase
public void testIndexReaderUnDeleteAll() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
dir.setPreventDoubleWrite(false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
writer.addDocument(createDocument("c"));

@ -1550,7 +1577,10 @@ public class TestIndexReader extends LuceneTestCase

Directory dir = new MockRAMDirectory();

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);

writer.setMaxBufferedDocs(2);
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));

@ -1573,7 +1603,7 @@ public class TestIndexReader extends LuceneTestCase
// reuse the doc values arrays in FieldCache
public void testFieldCacheReuseAfterClone() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);

@ -1604,7 +1634,7 @@ public class TestIndexReader extends LuceneTestCase
// FieldCache
public void testFieldCacheReuseAfterReopen() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);

@ -1636,7 +1666,7 @@ public class TestIndexReader extends LuceneTestCase
// reopen switches readOnly
public void testReopenChangeReadonly() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("number", "17", Field.Store.NO, Field.Index.NOT_ANALYZED));
writer.addDocument(doc);

@ -1677,7 +1707,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));

@ -1710,7 +1740,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-1609: don't load terms index
public void testNoTermsIndex() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
doc.add(new Field("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO, Field.Index.ANALYZED));
doc.add(new Field("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO, Field.Index.ANALYZED));

@ -1728,7 +1758,7 @@ public class TestIndexReader extends LuceneTestCase
assertFalse(((SegmentReader) r.getSequentialSubReaders()[0]).termsIndexLoaded());

assertEquals(-1, ((SegmentReader) r.getSequentialSubReaders()[0]).getTermInfosIndexDivisor());
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(doc);
writer.close();

@ -1747,7 +1777,7 @@ public class TestIndexReader extends LuceneTestCase
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
Document doc = new Document();
writer.addDocument(doc);
IndexReader r = IndexReader.open(dir, true);

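One more recurring substitution throughout TestIndexReader above: merge settings are read through the writer again instead of casting its merge policy. A minimal sketch of the two access styles, assuming a LogMergePolicy is in effect as the casts above imply (the class name MergeFactorAccess is illustrative):

    // A minimal sketch, not part of the commit: merge-factor access styles.
    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class MergeFactorAccess {
      public static void main(String[] args) throws Exception {
        IndexWriter writer = new IndexWriter(new RAMDirectory(),
            new WhitespaceAnalyzer(Version.LUCENE_CURRENT),
            true, IndexWriter.MaxFieldLength.LIMITED);

        // Trunk style (removed): go through the merge policy.
        int viaPolicy = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();

        // Restored style: writer-level delegate methods.
        writer.setMergeFactor(3);
        int viaWriter = writer.getMergeFactor();

        System.out.println(viaPolicy + " -> " + viaWriter);
        writer.close();
      }
    }
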
@ -19,6 +19,7 @@ package org.apache.lucene.index;

import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;

@ -196,7 +197,7 @@ public class TestIndexReaderClone extends LuceneTestCase {

TestIndexReaderReopen.createIndex(dir1, true);
IndexReader reader1 = IndexReader.open(dir1, false);
IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir1, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.optimize();
w.close();
IndexReader reader2 = reader1.clone(true);

@ -483,9 +484,8 @@ public class TestIndexReaderClone extends LuceneTestCase {

public void testCloseStoredFields() throws Exception {
final Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
((LogMergePolicy) w.getMergePolicy()).setUseCompoundFile(false);
((LogMergePolicy) w.getMergePolicy()).setUseCompoundDocStore(false);
IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
w.setUseCompoundFile(false);
Document doc = new Document();
doc.add(new Field("field", "yes it's stored", Field.Store.YES, Field.Index.ANALYZED));
w.addDocument(doc);

@ -28,7 +28,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.SegmentReader.Norm;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;

@ -119,10 +118,10 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
Directory dir3 = FSDirectory.open(indexDir3);

createIndex(dir3);
IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
IndexWriter iw = new IndexWriter(dir3, anlzr, false,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.addIndexesNoOptimize(new Directory[] { dir1, dir2 });
iw.optimize();
iw.close();

@ -138,9 +137,9 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
doTestNorms(dir3);

// now with optimize
iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT)
.setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw = new IndexWriter(dir3, anlzr, false, IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.optimize();
iw.close();
verifyIndex(dir3);

@ -239,13 +238,12 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {
}

private void createIndex(Directory dir) throws IOException {
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(true);
lmp.setUseCompoundDocStore(true);
IndexWriter iw = new IndexWriter(dir, anlzr, true,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(true);
iw.close();
}

@ -292,13 +290,12 @@ public class TestIndexReaderCloneNorms extends LuceneTestCase {

private void addDocs(Directory dir, int ndocs, boolean compound)
throws IOException {
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(5).setSimilarity(similarityOne));
LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
lmp.setMergeFactor(3);
lmp.setUseCompoundFile(compound);
lmp.setUseCompoundDocStore(compound);
IndexWriter iw = new IndexWriter(dir, anlzr, false,
IndexWriter.MaxFieldLength.LIMITED);
iw.setMaxBufferedDocs(5);
iw.setMergeFactor(3);
iw.setSimilarity(similarityOne);
iw.setUseCompoundFile(compound);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
}

@ -31,11 +31,13 @@ import java.util.HashMap;
import java.util.Set;

import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;

@ -169,9 +171,8 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}

private void doTestReopenWithCommit (Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(
new KeywordAnalyzer()).setMergeScheduler(new SerialMergeScheduler()));
IndexWriter iwriter = new IndexWriter(dir, new KeywordAnalyzer(), true, MaxFieldLength.LIMITED);
iwriter.setMergeScheduler(new SerialMergeScheduler());
IndexReader reader = IndexReader.open(dir, false);
try {
int M = 3;

@ -701,7 +702,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final Directory dir = new MockRAMDirectory();
final int n = 30;

IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}

@ -720,7 +721,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
modifier.deleteDocument(i % modifier.maxDoc());
modifier.close();
} else {
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier.addDocument(createDocument(n + i, 6));
modifier.close();
}

@ -945,7 +946,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

public static void createIndex(Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);

w.setMergePolicy(new LogDocMergePolicy(w));

@ -990,7 +991,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
static void modifyIndex(int i, Directory dir) throws IOException {
switch (i) {
case 0: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.close();

@ -1005,13 +1006,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 2: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.optimize();
w.close();
break;
}
case 3: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.addDocument(createDocument(101, 4));
w.optimize();
w.addDocument(createDocument(102, 4));

@ -1027,7 +1028,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
break;
}
case 5: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
w.addDocument(createDocument(101, 4));
w.close();
break;

@ -1191,8 +1192,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {

public void testReopenOnCommit() throws Throwable {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT).setIndexDeletionPolicy(new KeepAllCommits()));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
for(int i=0;i<4;i++) {
Document doc = new Document();
doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));

File diff suppressed because it is too large

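The deletion below removes TestIndexWriterConfig.java in full (263 lines), since the revert drops the class it exercises. As orientation for the listing, a minimal sketch of the fluent style whose contract the testSettersChaining case below verifies; only setters that appear elsewhere in this commit are used, and the class name FluentConfig is illustrative:

    // A minimal sketch, not part of the commit: fluent configuration,
    // which works because every setter returns the IndexWriterConfig.
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.index.SerialMergeScheduler;
    import org.apache.lucene.util.Version;

    public class FluentConfig {
      public static void main(String[] args) {
        IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_CURRENT)
            .setOpenMode(OpenMode.CREATE)
            .setMaxBufferedDocs(5)
            .setMergeScheduler(new SerialMergeScheduler());
        System.out.println(conf); // toString lists the configured settings
      }
    }
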
@ -1,263 +0,0 @@
|
|||
package org.apache.lucene.index;
|
||||
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.DocumentsWriter.IndexingChain;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCaseJ4;
import org.junit.Test;

public class TestIndexWriterConfig extends LuceneTestCaseJ4 {

  private static final class MySimilarity extends DefaultSimilarity {
    // Does not implement anything - used only for type checking on IndexWriterConfig.
  }

  private static final class MyIndexingChain extends IndexingChain {
    // Does not implement anything - used only for type checking on IndexWriterConfig.

    @Override
    DocConsumer getChain(DocumentsWriter documentsWriter) {
      return null;
    }

  }

  @Test
  public void testDefaults() throws Exception {
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT);
    assertEquals(WhitespaceAnalyzer.class, conf.getAnalyzer().getClass());
    assertNull(conf.getIndexCommit());
    assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
    assertEquals(IndexWriterConfig.UNLIMITED_FIELD_LENGTH, conf.getMaxFieldLength());
    assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
    assertEquals(OpenMode.CREATE_OR_APPEND, conf.getOpenMode());
    assertTrue(Similarity.getDefault() == conf.getSimilarity());
    assertEquals(IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, conf.getTermIndexInterval());
    assertEquals(IndexWriterConfig.getDefaultWriteLockTimeout(), conf.getWriteLockTimeout());
    assertEquals(IndexWriterConfig.WRITE_LOCK_TIMEOUT, IndexWriterConfig.getDefaultWriteLockTimeout());
    assertEquals(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS, conf.getMaxBufferedDeleteTerms());
    assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, conf.getRAMBufferSizeMB(), 0.0);
    assertEquals(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS, conf.getMaxBufferedDocs());
    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());

    // Sanity check - validate that all getters are covered.
    Set<String> getters = new HashSet<String>();
    getters.add("getAnalyzer");
    getters.add("getIndexCommit");
    getters.add("getIndexDeletionPolicy");
    getters.add("getMaxFieldLength");
    getters.add("getMergeScheduler");
    getters.add("getOpenMode");
    getters.add("getSimilarity");
    getters.add("getTermIndexInterval");
    getters.add("getWriteLockTimeout");
    getters.add("getDefaultWriteLockTimeout");
    getters.add("getMaxBufferedDeleteTerms");
    getters.add("getRAMBufferSizeMB");
    getters.add("getMaxBufferedDocs");
    getters.add("getIndexingChain");
    for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
      if (m.getDeclaringClass() == IndexWriterConfig.class && m.getName().startsWith("get")) {
        assertTrue("method " + m.getName() + " is not tested for defaults", getters.contains(m.getName()));
      }
    }
  }

  @Test
  public void testSettersChaining() throws Exception {
    // Ensures that every setter returns IndexWriterConfig to enable easy
    // chaining.
    for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
      if (m.getDeclaringClass() == IndexWriterConfig.class
          && m.getName().startsWith("set")
          && !Modifier.isStatic(m.getModifiers())) {
        assertEquals("method " + m.getName() + " does not return IndexWriterConfig",
            IndexWriterConfig.class, m.getReturnType());
      }
    }
  }

  @Test
  public void testConstants() throws Exception {
    // Tests that the values of the constants do not change
    assertEquals(1000, IndexWriterConfig.WRITE_LOCK_TIMEOUT);
    assertEquals(128, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL);
    assertEquals(Integer.MAX_VALUE, IndexWriterConfig.UNLIMITED_FIELD_LENGTH);
    assertEquals(-1, IndexWriterConfig.DISABLE_AUTO_FLUSH);
    assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS);
    assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
    assertEquals(16.0, IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, 0.0);
  }

  @Test
  public void testToString() throws Exception {
    String str = new IndexWriterConfig(TEST_VERSION_CURRENT).toString();
    for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
      int modifiers = f.getModifiers();
      if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
        // Skip static final fields, they are only constants
        continue;
      } else if ("indexingChain".equals(f.getName())) {
        // indexingChain is a package-private setting and thus is not output by
        // toString.
        continue;
      }
      assertTrue(f.getName() + " not found in toString", str.indexOf(f.getName()) != -1);
    }
  }

  @Test
  public void testClone() throws Exception {
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT);
    IndexWriterConfig clone = (IndexWriterConfig) conf.clone();

    // Clone is shallow since not all parameters are cloneable.
    assertTrue(conf.getIndexDeletionPolicy() == clone.getIndexDeletionPolicy());

    conf.setMergeScheduler(new SerialMergeScheduler());
    assertEquals(ConcurrentMergeScheduler.class, clone.getMergeScheduler().getClass());
  }

  @Test
  public void testInvalidValues() throws Exception {
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT);

    // Test Analyzer
    assertEquals(WhitespaceAnalyzer.class, conf.getAnalyzer().getClass());
    conf.setAnalyzer(new SimpleAnalyzer(TEST_VERSION_CURRENT));
    assertEquals(SimpleAnalyzer.class, conf.getAnalyzer().getClass());
    conf.setAnalyzer(null);
    assertEquals(WhitespaceAnalyzer.class, conf.getAnalyzer().getClass());

    // Test IndexDeletionPolicy
    assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
    conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(null));
    assertEquals(SnapshotDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
    conf.setIndexDeletionPolicy(null);
    assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());

    // Test MergeScheduler
    assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
    conf.setMergeScheduler(new SerialMergeScheduler());
    assertEquals(SerialMergeScheduler.class, conf.getMergeScheduler().getClass());
    conf.setMergeScheduler(null);
    assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());

    // Test Similarity
    assertTrue(Similarity.getDefault() == conf.getSimilarity());
    conf.setSimilarity(new MySimilarity());
    assertEquals(MySimilarity.class, conf.getSimilarity().getClass());
    conf.setSimilarity(null);
    assertTrue(Similarity.getDefault() == conf.getSimilarity());

    // Test IndexingChain
    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
    conf.setIndexingChain(new MyIndexingChain());
    assertEquals(MyIndexingChain.class, conf.getIndexingChain().getClass());
    conf.setIndexingChain(null);
    assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());

    try {
      conf.setMaxBufferedDeleteTerms(0);
      fail("should not have succeeded to set maxBufferedDeleteTerms to 0");
    } catch (IllegalArgumentException e) {
      // this is expected
    }

    try {
      conf.setMaxBufferedDocs(1);
      fail("should not have succeeded to set maxBufferedDocs to 1");
    } catch (IllegalArgumentException e) {
      // this is expected
    }

    try {
      // Disable both MAX_BUF_DOCS and RAM_SIZE_MB
      conf.setMaxBufferedDocs(4);
      conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
      conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
      fail("should not have succeeded to disable maxBufferedDocs when ramBufferSizeMB is disabled as well");
    } catch (IllegalArgumentException e) {
      // this is expected
    }

    conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
    conf.setMaxBufferedDocs(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
    try {
      conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
      fail("should not have succeeded to disable ramBufferSizeMB when maxBufferedDocs is disabled as well");
    } catch (IllegalArgumentException e) {
      // this is expected
    }

  }

  /**
   * @deprecated should be removed once all the deprecated setters are removed
   * from IndexWriter.
   */
  @Test
  public void testIndexWriterSetters() throws Exception {
    // This test intentionally tests deprecated methods. The purpose is to pass
    // whatever the user set on IW to IWC, so that if the user calls
    // iw.getConfig().getXYZ(), he'll get the same value he passed to
    // iw.setXYZ().
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT);
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, conf);

    writer.setSimilarity(new MySimilarity());
    assertEquals(MySimilarity.class, writer.getConfig().getSimilarity().getClass());

    writer.setMaxBufferedDeleteTerms(4);
    assertEquals(4, writer.getConfig().getMaxBufferedDeleteTerms());

    writer.setMaxBufferedDocs(10);
    assertEquals(10, writer.getConfig().getMaxBufferedDocs());

    writer.setMaxFieldLength(10);
    assertEquals(10, writer.getConfig().getMaxFieldLength());

    writer.setMergeScheduler(new SerialMergeScheduler());
    assertEquals(SerialMergeScheduler.class, writer.getConfig().getMergeScheduler().getClass());

    writer.setRAMBufferSizeMB(1.5);
    assertEquals(1.5, writer.getConfig().getRAMBufferSizeMB(), 0.0);

    writer.setTermIndexInterval(40);
    assertEquals(40, writer.getConfig().getTermIndexInterval());

    writer.setWriteLockTimeout(100);
    assertEquals(100, writer.getConfig().getWriteLockTimeout());
  }
}
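
Illustrative sketch, not part of the commit: the configuration style that TestIndexWriterConfig above pins down, shown in isolation. It reuses only calls visible in this diff (IndexWriterConfig, chained setters, IndexWriter(Directory, IndexWriterConfig), getConfig()); TEST_VERSION_CURRENT is the test suite's Version constant, and RAMDirectory stands in for a real Directory.

    // One config object drives the writer; every setter returns the
    // config, so calls chain (testSettersChaining verifies this).
    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT)
        .setMaxBufferedDocs(10)
        .setRAMBufferSizeMB(1.5);
    IndexWriter writer = new IndexWriter(dir, conf);
    // Settings are read back through the writer's live config.
    assertEquals(10, writer.getConfig().getMaxBufferedDocs());
    writer.close();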

@@ -20,6 +20,7 @@ package org.apache.lucene.index;

import java.io.IOException;
import java.util.Arrays;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;

@@ -40,8 +41,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
    String[] text = { "Amsterdam", "Venice" };

    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(1));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setUseCompoundFile(true);
    modifier.setMaxBufferedDeleteTerms(1);

    for (int i = 0; i < keywords.length; i++) {
      Document doc = new Document();

@@ -75,9 +78,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  public void testNonRAMDelete() throws IOException {

    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(2);
    modifier.setMaxBufferedDeleteTerms(2);

    int id = 0;
    int value = 100;

@@ -109,8 +113,9 @@ public class TestIndexWriterDelete extends LuceneTestCase {

  public void testMaxBufferedDeletes() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(1));
    IndexWriter writer = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.setMaxBufferedDeleteTerms(1);
    writer.deleteDocuments(new Term("foobar", "1"));
    writer.deleteDocuments(new Term("foobar", "1"));
    writer.deleteDocuments(new Term("foobar", "1"));

@@ -123,9 +128,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  public void testRAMDeletes() throws IOException {
    for(int t=0;t<2;t++) {
      Directory dir = new MockRAMDirectory();
      IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
          TEST_VERSION_CURRENT).setMaxBufferedDocs(4)
          .setMaxBufferedDeleteTerms(4));
      IndexWriter modifier = new IndexWriter(dir,
          new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
      modifier.setMaxBufferedDocs(4);
      modifier.setMaxBufferedDeleteTerms(4);

      int id = 0;
      int value = 100;

@@ -164,9 +170,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  // test when delete terms apply to both disk and ram segments
  public void testBothDeletes() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(100)
        .setMaxBufferedDeleteTerms(100));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(100);
    modifier.setMaxBufferedDeleteTerms(100);

    int id = 0;
    int value = 100;

@@ -196,9 +203,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  // test that batched delete terms are flushed together
  public void testBatchDeletes() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(2);
    modifier.setMaxBufferedDeleteTerms(2);

    int id = 0;
    int value = 100;

@@ -239,9 +247,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  // test deleteAll()
  public void testDeleteAll() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(2);
    modifier.setMaxBufferedDeleteTerms(2);

    int id = 0;
    int value = 100;

@@ -285,9 +294,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  // test rollback of deleteAll()
  public void testDeleteAllRollback() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(2);
    modifier.setMaxBufferedDeleteTerms(2);

    int id = 0;
    int value = 100;

@@ -322,9 +332,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
  // test deleteAll() w/ near real-time reader
  public void testDeleteAllNRT() throws IOException {
    Directory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDocs(2)
        .setMaxBufferedDeleteTerms(2));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDocs(2);
    modifier.setMaxBufferedDeleteTerms(2);

    int id = 0;
    int value = 100;

@@ -413,7 +424,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {

    // First build up a starting index:
    MockRAMDirectory startDir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(startDir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    for (int i = 0; i < 157; i++) {
      Document d = new Document();
      d.add(new Field("id", Integer.toString(i), Field.Store.YES,

@@ -435,9 +447,11 @@ public class TestIndexWriterDelete extends LuceneTestCase {
    while (!done) {
      MockRAMDirectory dir = new MockRAMDirectory(startDir);
      dir.setPreventDoubleWrite(false);
      IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
          TEST_VERSION_CURRENT).setMaxBufferedDocs(1000)
          .setMaxBufferedDeleteTerms(1000));
      IndexWriter modifier = new IndexWriter(dir,
          new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);

      modifier.setMaxBufferedDocs(1000); // use flush or close
      modifier.setMaxBufferedDeleteTerms(1000); // use flush or close

      // For each disk size, first try to commit against
      // dir that will hit random IOExceptions & disk

@@ -639,11 +653,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
    String[] text = { "Amsterdam", "Venice" };

    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setMaxBufferedDeleteTerms(2));
    LogMergePolicy lmp = (LogMergePolicy) modifier.getMergePolicy();
    lmp.setUseCompoundFile(true);
    lmp.setUseCompoundDocStore(true);
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setUseCompoundFile(true);
    modifier.setMaxBufferedDeleteTerms(2);

    dir.failOn(failure.reset());

@@ -749,7 +762,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
    String[] text = { "Amsterdam", "Venice" };

    MockRAMDirectory dir = new MockRAMDirectory();
    IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);

    dir.failOn(failure.reset());
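
Illustrative sketch, not part of the commit: what the maxBufferedDeleteTerms knob used throughout TestIndexWriterDelete controls, written against the post-revert setter API shown above. The Term's field name and value are hypothetical.

    // With the threshold at 1, each buffered delete term forces the
    // buffered deletes to be flushed instead of accumulating in RAM.
    IndexWriter modifier = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    modifier.setMaxBufferedDeleteTerms(1);
    modifier.deleteDocuments(new Term("id", "7")); // flushes immediately
    modifier.close();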

@@ -24,6 +24,8 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

@@ -109,11 +111,11 @@ public class TestIndexWriterExceptions extends LuceneTestCase {

  ThreadLocal<Thread> doFail = new ThreadLocal<Thread>();

  private class MockIndexWriter extends IndexWriter {
  public class MockIndexWriter extends IndexWriter {
    Random r = new java.util.Random(17);

    public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
      super(dir, conf);
    public MockIndexWriter(Directory dir, Analyzer a, boolean create, MaxFieldLength mfl) throws IOException {
      super(dir, a, create, mfl);
    }

    @Override

@@ -132,9 +134,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
  public void testRandomExceptions() throws Throwable {
    MockRAMDirectory dir = new MockRAMDirectory();

    MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setRAMBufferSizeMB(0.1));
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.setRAMBufferSizeMB(0.1);

    if (DEBUG)
      writer.setInfoStream(System.out);

@@ -169,9 +172,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
  public void testRandomExceptionsThreads() throws Throwable {

    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setRAMBufferSizeMB(0.2));
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.setRAMBufferSizeMB(0.2);

    if (DEBUG)
      writer.setInfoStream(System.out);
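
Illustrative sketch, not part of the commit: the fault-injection shape these TestIndexWriterExceptions hunks adjust. The hunks above elide the overridden failure hook, so the subclass below shows only the constructor pass-through; FlakyIndexWriter is a hypothetical name.

    // Subclass IndexWriter so a test can throw at chosen points; the real
    // MockIndexWriter overrides an internal hook to fail at random.
    class FlakyIndexWriter extends IndexWriter {
      FlakyIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl)
          throws IOException {
        super(dir, a, create, mfl);
      }
    }
    // Keep background merge failures from tearing down the test harness.
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();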

@@ -22,7 +22,6 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;

/**

@@ -75,10 +74,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
  public void testIndexWriterLockRelease() throws IOException {
    FSDirectory dir = FSDirectory.open(this.__test_dir);
    try {
      new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
      new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
    } catch (FileNotFoundException e) {
      try {
        new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND));
        new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
      } catch (FileNotFoundException e1) {
      }
    } finally {

@@ -19,9 +19,9 @@ package org.apache.lucene.index;

import java.io.IOException;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil;

@@ -34,8 +34,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testNormalCase() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(10);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    for (int i = 0; i < 100; i++) {

@@ -50,8 +51,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testNoOverMerge() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(10);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    boolean noOverMerge = false;

@@ -71,8 +73,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testForceFlush() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(10);
    LogDocMergePolicy mp = new LogDocMergePolicy(writer);
    mp.setMinMergeDocs(100);
    writer.setMergePolicy(mp);

@@ -81,11 +84,11 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
      addDoc(writer);
      writer.close();

      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
          .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
      writer.setMaxBufferedDocs(10);
      writer.setMergePolicy(mp);
      mp.setMinMergeDocs(100);
      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
      writer.setMergeFactor(10);
      checkInvariants(writer);
    }

@@ -96,8 +99,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testMergeFactorChange() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(100);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    for (int i = 0; i < 250; i++) {

@@ -105,7 +109,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
      checkInvariants(writer);
    }

    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
    writer.setMergeFactor(5);

    // merge policy only fixes segments on levels where merges
    // have been triggered, so check invariants after all adds

@@ -121,8 +125,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testMaxBufferedDocsChange() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(101));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.setMaxBufferedDocs(101);
    writer.setMergeFactor(101);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    // leftmost* segment has 1 doc

@@ -134,17 +139,14 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
      }
      writer.close();

      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
          .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(101));
      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
      writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
      writer.setMaxBufferedDocs(101);
      writer.setMergeFactor(101);
      writer.setMergePolicy(new LogDocMergePolicy(writer));
    }

    writer.close();
    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(10);

    // merge policy only fixes segments on levels where merges
    // have been triggered, so check invariants after all adds

@@ -157,7 +159,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
      addDoc(writer);
    }
    writer.commit();
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
    writer.commit();
    checkInvariants(writer);

@@ -168,9 +170,10 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
  public void testMergeDocCount0() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(100);

    for (int i = 0; i < 250; i++) {
      addDoc(writer);

@@ -182,17 +185,17 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
    reader.deleteDocuments(new Term("content", "aaa"));
    reader.close();

    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT)
        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
    writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(5);

    // merge factor is changed, so check invariants after all adds
    for (int i = 0; i < 10; i++) {
      addDoc(writer);
    }
    writer.commit();
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
    writer.commit();
    checkInvariants(writer);
    assertEquals(10, writer.maxDoc());

@@ -208,9 +211,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {

  private void checkInvariants(IndexWriter writer) throws IOException {
    _TestUtil.syncConcurrentMerges(writer);
    int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs();
    int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
    int maxMergeDocs = ((LogMergePolicy) writer.getMergePolicy()).getMaxMergeDocs();
    int maxBufferedDocs = writer.getMaxBufferedDocs();
    int mergeFactor = writer.getMergeFactor();
    int maxMergeDocs = writer.getMaxMergeDocs();

    int ramSegmentCount = writer.getNumBufferedDocuments();
    assertTrue(ramSegmentCount < maxBufferedDocs);
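
Illustrative sketch, not part of the commit: the merge tuning that TestIndexWriterMergePolicy reverts to, shown standalone; dir is assumed to be any Directory as in the tests above.

    // Log-structured merging by document count: roughly mergeFactor
    // segments accumulate on a level before being merged into one.
    IndexWriter writer = new IndexWriter(dir,
        new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    writer.setMaxBufferedDocs(10);
    writer.setMergeFactor(10);
    // Let background merges finish before asserting on segment structure.
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();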

@@ -20,7 +20,6 @@ import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;

import java.io.IOException;

@@ -57,8 +56,8 @@ public class TestIndexWriterMerging extends LuceneTestCase

    Directory merged = new MockRAMDirectory();

    IndexWriter writer = new IndexWriter(merged, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new StandardAnalyzer(TEST_VERSION_CURRENT)));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMergeFactor(2);

    writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
    writer.optimize();

@@ -91,13 +90,12 @@ public class TestIndexWriterMerging extends LuceneTestCase
    return fail;
  }

  private void fillIndex(Directory dir, int start, int numDocs) throws IOException {
  private void fillIndex(Directory dir, int start, int numDocs) throws IOException
  {

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setAnalyzer(
        new StandardAnalyzer(TEST_VERSION_CURRENT))
        .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMergeFactor(2);
    writer.setMaxBufferedDocs(2);

    for (int i = start; i < (start + numDocs); i++)
    {

@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.List;
import java.util.Random;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;

@@ -74,7 +75,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    boolean optimize = true;

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);

    // create the index
    createIndexNoClose(!optimize, "index1", writer);

@@ -108,7 +110,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    assertEquals(0, count(new Term("id", id10), r3));
    assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));

    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);

@@ -135,7 +138,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    boolean optimize = false;

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    // create the index
    createIndexNoClose(!optimize, "index1", writer);

@@ -143,7 +147,8 @@ public class TestIndexWriterReader extends LuceneTestCase {

    // create a 2nd index
    Directory dir2 = new MockRAMDirectory();
    IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer2.setInfoStream(infoStream);
    createIndexNoClose(!optimize, "index2", writer2);
    writer2.close();

@@ -180,12 +185,14 @@ public class TestIndexWriterReader extends LuceneTestCase {
    boolean optimize = false;

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create a 2nd index
    Directory dir2 = new MockRAMDirectory();
    IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer2.setInfoStream(infoStream);
    createIndexNoClose(!optimize, "index2", writer2);
    writer2.close();

@@ -213,7 +220,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    boolean optimize = true;

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    // create the index
    createIndexNoClose(!optimize, "index1", writer);

@@ -251,7 +259,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    writer.close();

    // reopen the writer to verify the delete made it to the directory
    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    IndexReader w2r1 = writer.getReader();
    assertEquals(0, count(new Term("id", id10), w2r1));

@@ -265,7 +274,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    int numDirs = 3;

    Directory mainDir = new MockRAMDirectory();
    IndexWriter mainWriter = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    mainWriter.setInfoStream(infoStream);
    AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
    addDirThreads.launchThreads(numDirs);

@@ -308,7 +318,9 @@ public class TestIndexWriterReader extends LuceneTestCase {
      this.numDirs = numDirs;
      this.mainWriter = mainWriter;
      addDir = new MockRAMDirectory();
      IndexWriter writer = new IndexWriter(addDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
      IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
          IndexWriter.MaxFieldLength.LIMITED);
      writer.setMaxBufferedDocs(2);
      for (int i = 0; i < NUM_INIT_DOCS; i++) {
        Document doc = createDocument(i, "addindex", 4);
        writer.addDocument(doc);

@@ -414,7 +426,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
   */
  public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    IndexReader r1 = writer.getReader();
    assertEquals(0, r1.maxDoc());

@@ -451,7 +464,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
    writer.close();

    // test whether the changes made it to the directory
    writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    IndexReader w2r1 = writer.getReader();
    // insure the deletes were actually flushed to the directory
    assertEquals(200, w2r1.maxDoc());

@@ -490,7 +504,8 @@ public class TestIndexWriterReader extends LuceneTestCase {

  public static void createIndex(Directory dir1, String indexName,
      boolean multiSegment) throws IOException {
    IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    w.setMergePolicy(new LogDocMergePolicy(w));
    for (int i = 0; i < 100; i++) {
      w.addDocument(createDocument(i, indexName, 4));

@@ -524,7 +539,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
  public void testMergeWarmer() throws Exception {

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(2));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index

@@ -536,12 +552,13 @@ public class TestIndexWriterReader extends LuceneTestCase {
    // Enroll warmer
    MyWarmer warmer = new MyWarmer();
    writer.setMergedSegmentWarmer(warmer);
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    writer.setMergeFactor(2);
    writer.setMaxBufferedDocs(2);

    for (int i = 0; i < 10; i++) {
      writer.addDocument(createDocument(i, "test", 4));
    }
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();

    assertTrue(warmer.warmCount > 0);
    final int count = warmer.warmCount;

@@ -557,7 +574,8 @@ public class TestIndexWriterReader extends LuceneTestCase {

  public void testAfterCommit() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index

@@ -573,7 +591,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
    for (int i = 0; i < 10; i++) {
      writer.addDocument(createDocument(i, "test", 4));
    }
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();

    IndexReader r2 = r1.reopen();
    if (r2 != r1) {

@@ -589,7 +607,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
  // Make sure reader remains usable even if IndexWriter closes
  public void testAfterClose() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index

@@ -618,9 +637,10 @@ public class TestIndexWriterReader extends LuceneTestCase {
  // Stress test reopen during addIndexes
  public void testDuringAddIndexes() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    writer.setMergeFactor(2);

    // create the index
    createIndexNoClose(false, "test", writer);

@@ -695,9 +715,10 @@ public class TestIndexWriterReader extends LuceneTestCase {
  // Stress test reopen during add/delete
  public void testDuringAddDelete() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT));
    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    writer.setMergeFactor(2);

    // create the index
    createIndexNoClose(false, "test", writer);

@@ -775,7 +796,8 @@ public class TestIndexWriterReader extends LuceneTestCase {

  public void testExpungeDeletes() throws Throwable {
    Directory dir = new MockRAMDirectory();
    final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);

@@ -799,7 +821,8 @@ public class TestIndexWriterReader extends LuceneTestCase {

  public void testDeletesNumDocs() throws Throwable {
    Directory dir = new MockRAMDirectory();
    final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
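
Illustrative sketch, not part of the commit: the near-real-time reader pattern TestIndexWriterReader exercises, using only calls visible above (createDocument is that test's helper).

    // Pull a reader straight from the writer, then refresh it after
    // further changes instead of reopening from the Directory.
    IndexReader r1 = writer.getReader();
    writer.addDocument(createDocument(1, "test", 4));
    IndexReader r2 = r1.reopen();
    if (r2 != r1) {
      r1.close(); // reopen returned a new reader; release the old one
      r1 = r2;
    }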

@@ -17,21 +17,14 @@ package org.apache.lucene.index;
 * limitations under the License.
 */

import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;

import java.util.*;


/**

@@ -70,10 +63,10 @@ public class TestLazyBug extends LuceneTestCase {
    Directory dir = new RAMDirectory();
    try {
      Random r = newRandom();
      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT));
      LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
      lmp.setUseCompoundFile(false);
      lmp.setUseCompoundDocStore(false);
      Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
      IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

      writer.setUseCompoundFile(false);

      for (int d = 1; d <= NUM_DOCS; d++) {
        Document doc = new Document();

@@ -19,6 +19,7 @@ package org.apache.lucene.index;

import java.io.IOException;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.search.IndexSearcher;

@@ -59,9 +60,9 @@ public class TestLazyProxSkipping extends LuceneTestCase {
    int numDocs = 500;

    Directory directory = new SeekCountingDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setUseCompoundFile(false);
    writer.setMaxBufferedDocs(10);
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      String content;

@@ -117,7 +118,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {

  public void testSeek() throws IOException {
    Directory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT));
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < 10; i++) {
      Document doc = new Document();
      doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));

@@ -44,7 +44,8 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestMultiLevelSkipList extends LuceneTestCase {
  public void testSimpleSkip() throws IOException {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT).setAnalyzer(new PayloadAnalyzer()));
    IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true,
        IndexWriter.MaxFieldLength.LIMITED);
    Term term = new Term("test", "a");
    for (int i = 0; i < 5000; i++) {
      Document d1 = new Document();

@@ -19,6 +19,7 @@ package org.apache.lucene.index;

import java.util.Random;

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
import org.apache.lucene.store.Directory;

@@ -31,12 +32,13 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {

  public void testIndexing() throws Exception {
    Directory mainDir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
    ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setUseCompoundFile(false);
    IndexReader reader = writer.getReader(); // start pooling readers
    reader.close();
    writer.setMergeFactor(2);
    writer.setMaxBufferedDocs(10);
    RunThread[] indexThreads = new RunThread[4];
    for (int x=0; x < indexThreads.length; x++) {
      indexThreads[x] = new RunThread(x % 2, writer);

@@ -26,7 +26,6 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;

@@ -100,10 +99,9 @@ public class TestNorms extends LuceneTestCase {
    Directory dir3 = new RAMDirectory();

    createIndex(dir3);
    IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setAnalyzer(anlzr).setOpenMode(OpenMode.APPEND)
        .setMaxBufferedDocs(5));
    ((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
    IndexWriter iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
    iw.setMaxBufferedDocs(5);
    iw.setMergeFactor(3);
    iw.addIndexesNoOptimize(new Directory[]{dir1,dir2});
    iw.optimize();
    iw.close();

@@ -119,9 +117,9 @@ public class TestNorms extends LuceneTestCase {
    doTestNorms(dir3);

    // now with optimize
    iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT)
        .setOpenMode(OpenMode.APPEND).setAnalyzer(anlzr).setMaxBufferedDocs(5));
    ((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
    iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
    iw.setMaxBufferedDocs(5);
    iw.setMergeFactor(3);
    iw.optimize();
    iw.close();
    verifyIndex(dir3);

@@ -145,13 +143,11 @@ public class TestNorms extends LuceneTestCase {
  }

  private void createIndex(Directory dir) throws IOException {
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setOpenMode(OpenMode.CREATE).setAnalyzer(anlzr)
        .setMaxBufferedDocs(5).setSimilarity(similarityOne));
    LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
    lmp.setMergeFactor(3);
    lmp.setUseCompoundFile(true);
    lmp.setUseCompoundDocStore(true);
    IndexWriter iw = new IndexWriter(dir,anlzr,true, IndexWriter.MaxFieldLength.LIMITED);
    iw.setMaxBufferedDocs(5);
    iw.setMergeFactor(3);
    iw.setSimilarity(similarityOne);
    iw.setUseCompoundFile(true);
    iw.close();
  }

@@ -189,13 +185,11 @@ public class TestNorms extends LuceneTestCase {
  }

  private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT).setOpenMode(OpenMode.APPEND).setAnalyzer(anlzr)
        .setMaxBufferedDocs(5).setSimilarity(similarityOne));
    LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
    lmp.setMergeFactor(3);
    lmp.setUseCompoundFile(compound);
    lmp.setUseCompoundDocStore(compound);
    IndexWriter iw = new IndexWriter(dir,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
    iw.setMaxBufferedDocs(5);
    iw.setMergeFactor(3);
    iw.setSimilarity(similarityOne);
    iw.setUseCompoundFile(compound);
    for (int i = 0; i < ndocs; i++) {
      iw.addDocument(newDoc());
    }
Some files were not shown because too many files have changed in this diff.