LUCENE-1737: make field name -> number congruent so we always use bulk merge for merging doc stores

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1040940 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2010-12-01 10:46:46 +00:00
parent f1ce9abcfb
commit e7214a0f6f
3 changed files with 33 additions and 9 deletions
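Why congruent numbering matters here: stored fields and term vectors refer to fields by number rather than by name, so the doc-store merger can only copy two segments' stored-fields files as raw bytes when both segments map each field name to the same number. Below is a minimal, hypothetical sketch of that idea (FieldNumberRegistry is an illustration, not Lucene's FieldInfos API): one writer-level registry hands out numbers in first-seen order, so every segment flushed by the same writer agrees on the mapping and the bulk-copy path stays available.

// Minimal sketch (hypothetical; not Lucene's FieldInfos API): field numbers are
// handed out in first-seen order, so segments that share one registry end up with
// identical name -> number mappings and their doc stores line up for bulk copy.
import java.util.LinkedHashMap;
import java.util.Map;

class FieldNumberRegistry {
  private final Map<String, Integer> numbers = new LinkedHashMap<>();

  // Returns the existing number for a field name, or assigns the next free one.
  int addOrGet(String name) {
    Integer number = numbers.get(name);
    if (number == null) {
      number = numbers.size();            // next free number, in first-seen order
      numbers.put(name, number);
    }
    return number;
  }

  public static void main(String[] args) {
    // Per-segment registries (the old behavior) can disagree when documents
    // introduce fields in different orders:
    FieldNumberRegistry segA = new FieldNumberRegistry();
    FieldNumberRegistry segB = new FieldNumberRegistry();
    segA.addOrGet("title");  segA.addOrGet("body");   // title=0, body=1
    segB.addOrGet("body");   segB.addOrGet("title");  // body=0,  title=1 -- mismatch

    // One writer-level registry (this commit's approach) keeps every segment congruent:
    FieldNumberRegistry shared = new FieldNumberRegistry();
    System.out.println(shared.addOrGet("title"));     // 0, for every segment
    System.out.println(shared.addOrGet("body"));      // 1, for every segment
  }
}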

DocFieldProcessor.java

@@ -39,9 +39,9 @@ final class DocFieldProcessor extends DocConsumer {
   final StoredFieldsWriter fieldsWriter;
 
   public DocFieldProcessor(DocumentsWriter docWriter, DocFieldConsumer consumer) {
-    this.fieldInfos = new FieldInfos();
     this.docWriter = docWriter;
     this.consumer = consumer;
+    fieldInfos = docWriter.getFieldInfos();
     consumer.setFieldInfos(fieldInfos);
     fieldsWriter = new StoredFieldsWriter(docWriter, fieldInfos);
   }

DocumentsWriter.java

@@ -315,13 +315,15 @@ final class DocumentsWriter {
   }
 
   private boolean closed;
+  private final FieldInfos fieldInfos;
 
-  DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain, int maxThreadStates) throws IOException {
+  DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain, int maxThreadStates, FieldInfos fieldInfos) throws IOException {
     this.directory = directory;
     this.writer = writer;
     this.similarity = writer.getConfig().getSimilarity();
     this.maxThreadStates = maxThreadStates;
     flushedDocCount = writer.maxDoc();
+    this.fieldInfos = fieldInfos;
 
     consumer = indexingChain.getChain(this);
     if (consumer instanceof DocFieldProcessor) {
@@ -329,10 +331,14 @@ final class DocumentsWriter {
     }
   }
 
+  public FieldInfos getFieldInfos() {
+    return fieldInfos;
+  }
+
   /** Returns true if any of the fields in the current
    *  buffered docs have omitTermFreqAndPositions==false */
   boolean hasProx() {
-    return (docFieldProcessor != null) ? docFieldProcessor.fieldInfos.hasProx()
+    return (docFieldProcessor != null) ? fieldInfos.hasProx()
                                        : true;
   }
@@ -602,8 +608,8 @@ final class DocumentsWriter {
   synchronized private void initFlushState(boolean onlyDocStore) {
     initSegmentName(onlyDocStore);
-    final SegmentCodecs info = SegmentCodecs.build(docFieldProcessor.fieldInfos, writer.codecs);
-    flushState = new SegmentWriteState(infoStream, directory, segment, docFieldProcessor.fieldInfos,
+    final SegmentCodecs info = SegmentCodecs.build(fieldInfos, writer.codecs);
+    flushState = new SegmentWriteState(infoStream, directory, segment, fieldInfos,
                                        docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval(), info);
   }
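Taken together, the DocumentsWriter changes move FieldInfos ownership up to the writer: the instance arrives through the constructor, is exposed via getFieldInfos(), and flushing no longer reaches into docFieldProcessor.fieldInfos. A rough sketch of the wiring follows, using placeholder classes (DocumentsWriterSketch and friends are illustrative stand-ins, not the real Lucene types):

// Hypothetical sketch of the new ownership, with placeholder types:
// the writer builds one FieldInfos, injects it into DocumentsWriter, and
// DocFieldProcessor asks DocumentsWriter for that same instance instead of
// constructing its own.
import java.util.HashMap;
import java.util.Map;

class FieldInfosSketch {
  final Map<String, Integer> byName = new HashMap<>();    // stands in for FieldInfos
}

class DocumentsWriterSketch {
  private final FieldInfosSketch fieldInfos;

  DocumentsWriterSketch(FieldInfosSketch fieldInfos) {
    this.fieldInfos = fieldInfos;                          // injected by the writer
  }

  FieldInfosSketch getFieldInfos() {
    return fieldInfos;                                     // shared, never copied
  }
}

class DocFieldProcessorSketch {
  final FieldInfosSketch fieldInfos;

  DocFieldProcessorSketch(DocumentsWriterSketch docWriter) {
    this.fieldInfos = docWriter.getFieldInfos();           // same object => same numbering
  }
}

class WiringDemo {
  public static void main(String[] args) {
    FieldInfosSketch shared = new FieldInfosSketch();      // built once at writer level
    DocumentsWriterSketch dw = new DocumentsWriterSketch(shared);
    DocFieldProcessorSketch dfp = new DocFieldProcessorSketch(dw);
    System.out.println(dfp.fieldInfos == shared);          // true: one registry for all flushes
  }
}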

IndexWriter.java

@@ -21,7 +21,6 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.PayloadProcessorProvider.DirPayloadProcessor;
-import org.apache.lucene.search.Similarity;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.Lock;
@@ -31,7 +30,6 @@ import org.apache.lucene.store.BufferedIndexInput;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.index.codecs.CodecProvider;
 import org.apache.lucene.util.ThreadInterruptedException;
-import org.apache.lucene.util.Version;
 import org.apache.lucene.util.Bits;
 
 import java.io.IOException;
@@ -766,7 +764,7 @@ public class IndexWriter implements Closeable {
     setRollbackSegmentInfos(segmentInfos);
 
-    docWriter = new DocumentsWriter(directory, this, conf.getIndexingChain(), conf.getMaxThreadStates());
+    docWriter = new DocumentsWriter(directory, this, conf.getIndexingChain(), conf.getMaxThreadStates(), getCurrentFieldInfos());
     docWriter.setInfoStream(infoStream);
     docWriter.setMaxFieldLength(maxFieldLength);
@@ -809,7 +807,27 @@ public class IndexWriter implements Closeable {
       }
     }
   }
 
+  private FieldInfos getCurrentFieldInfos() throws IOException {
+    final FieldInfos fieldInfos;
+    if (segmentInfos.size() > 0) {
+      SegmentInfo info = segmentInfos.info(segmentInfos.size()-1);
+      Directory cfsDir;
+      if (info.getUseCompoundFile()) {
+        cfsDir = new CompoundFileReader(directory, IndexFileNames.segmentFileName(info.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION));
+      } else {
+        cfsDir = directory;
+      }
+      fieldInfos = new FieldInfos(cfsDir, IndexFileNames.segmentFileName(info.name, "", IndexFileNames.FIELD_INFOS_EXTENSION));
+      if (info.getUseCompoundFile()) {
+        cfsDir.close();
+      }
+    } else {
+      fieldInfos = new FieldInfos();
+    }
+    return fieldInfos;
+  }
+
   private synchronized void setRollbackSegmentInfos(SegmentInfos infos) {
     rollbackSegmentInfos = (SegmentInfos) infos.clone();
     rollbackSegments = new HashMap<SegmentInfo,Integer>();
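A note on the added getCurrentFieldInfos(): it seeds the writer-level FieldInfos from the newest segment's field-infos file, opening a CompoundFileReader when that segment is stored as a compound file and closing it afterwards, and falls back to an empty FieldInfos for a brand-new index. Presumably the intent is that numbers handed out by this writer continue the numbering already recorded on disk, which is what keeps newly flushed segments congruent with the existing ones and keeps the bulk doc-store merge applicable.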