diff --git a/lucene/core/src/java/org/apache/lucene/codecs/BlockTermsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/BlockTermsWriter.java index a270ef60740..daa82c4bac0 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/BlockTermsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/BlockTermsWriter.java @@ -71,7 +71,7 @@ public class BlockTermsWriter extends FieldsConsumer { public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter, SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException { - final String termsFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, TERMS_EXTENSION); + final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION); this.termsIndexWriter = termsIndexWriter; out = state.directory.createOutput(termsFileName, state.context); boolean success = false; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java index 0fdde4de9f4..e7f3da0b953 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java @@ -144,7 +144,7 @@ public class BlockTreeTermsWriter extends FieldsConsumer { throw new IllegalArgumentException("maxItemsInBlock must be at least 2*(minItemsInBlock-1); got maxItemsInBlock=" + maxItemsInBlock + " minItemsInBlock=" + minItemsInBlock); } - final String termsFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, TERMS_EXTENSION); + final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION); out = state.directory.createOutput(termsFileName, state.context); boolean success = false; IndexOutput indexOut = null; @@ -156,7 +156,7 @@ public class BlockTreeTermsWriter extends FieldsConsumer { //DEBUG = state.segmentName.equals("_4a"); - final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, TERMS_INDEX_EXTENSION); + final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); indexOut = state.directory.createOutput(termsIndexFileName, state.context); writeIndexHeader(indexOut); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/FixedGapTermsIndexWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/FixedGapTermsIndexWriter.java index 853261f8734..0fcfcf8f693 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/FixedGapTermsIndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/FixedGapTermsIndexWriter.java @@ -57,7 +57,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase { @SuppressWarnings("unused") private final FieldInfos fieldInfos; // unread public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException { - final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, TERMS_INDEX_EXTENSION); + final String indexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); termIndexInterval = state.termIndexInterval; out = state.directory.createOutput(indexFileName, state.context); boolean success = false; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsFormat.java 
index 46832fbc7ed..bbd5f1d28b4 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsFormat.java @@ -25,6 +25,8 @@ import org.apache.lucene.store.IOContext; * limitations under the License. */ +// nocommit fix other formats to get SI/FIS too... + /** * Controls the format of stored fields */ @@ -35,5 +37,5 @@ public abstract class StoredFieldsFormat { /** Returns a {@link StoredFieldsWriter} to write stored * fields. */ - public abstract StoredFieldsWriter fieldsWriter(Directory directory, String segment, IOContext context) throws IOException; + public abstract StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java index 01a3924ef54..e15eb2b4d73 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/StoredFieldsWriter.java @@ -1,15 +1,5 @@ package org.apache.lucene.codecs; -import java.io.Closeable; -import java.io.IOException; - -import org.apache.lucene.document.Document; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.MergeState; -import org.apache.lucene.util.Bits; - /** * Copyright 2004 The Apache Software Foundation * @@ -26,6 +16,16 @@ import org.apache.lucene.util.Bits; * the License. */ +import java.io.Closeable; +import java.io.IOException; + +import org.apache.lucene.document.Document; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.MergeState; +import org.apache.lucene.util.Bits; + /** * Codec API for writing stored fields: *

@@ -63,7 +63,7 @@ public abstract class StoredFieldsWriter implements Closeable { * calls to {@link #startDocument(int)}, but a Codec should * check that this is the case to detect the JRE bug described * in LUCENE-1282. */ - public abstract void finish(int numDocs) throws IOException; + public abstract void finish(FieldInfos fis, int numDocs) throws IOException; /** Merges in the stored fields from the readers in * mergeState. The default implementation skips @@ -94,7 +94,7 @@ public abstract class StoredFieldsWriter implements Closeable { mergeState.checkAbort.work(300); } } - finish(docCount); + finish(mergeState.fieldInfos, docCount); return docCount; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/VariableGapTermsIndexWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/VariableGapTermsIndexWriter.java index bb331d71906..084dfb315ee 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/VariableGapTermsIndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/VariableGapTermsIndexWriter.java @@ -173,7 +173,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase { // in the extremes. public VariableGapTermsIndexWriter(SegmentWriteState state, IndexTermSelector policy) throws IOException { - final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, TERMS_INDEX_EXTENSION); + final String indexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); out = state.directory.createOutput(indexFileName, state.context); boolean success = false; try { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xStoredFieldsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xStoredFieldsFormat.java index b37d18d24d5..14f8a7c5bc9 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xStoredFieldsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene3x/Lucene3xStoredFieldsFormat.java @@ -39,7 +39,7 @@ class Lucene3xStoredFieldsFormat extends StoredFieldsFormat { } @Override - public StoredFieldsWriter fieldsWriter(Directory directory, String segment, + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { throw new UnsupportedOperationException("this codec can only be used for reading"); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsWriter.java index 64a3137782e..c51a6267d73 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40PostingsWriter.java @@ -99,7 +99,7 @@ public final class Lucene40PostingsWriter extends PostingsWriterBase { this.skipInterval = skipInterval; this.skipMinimum = skipInterval; /* set to the same for now */ // this.segment = state.segmentName; - String fileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION); + String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION); freqOut = state.directory.createOutput(fileName, state.context); boolean success = false; try { @@ -110,7 +110,7 @@ public final class Lucene40PostingsWriter extends PostingsWriterBase { if (state.fieldInfos.hasProx()) { // At least one field does not omit TF, 
so create the // prox file - fileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION); + fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION); proxOut = state.directory.createOutput(fileName, state.context); } else { // Every field omits TF so we will write no prox file diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsFormat.java index dac8dacc30b..09a1d1866c3 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsFormat.java @@ -86,8 +86,8 @@ public class Lucene40StoredFieldsFormat extends StoredFieldsFormat { } @Override - public StoredFieldsWriter fieldsWriter(Directory directory, String segment, + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { - return new Lucene40StoredFieldsWriter(directory, segment, context); + return new Lucene40StoredFieldsWriter(directory, si.name, context); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsWriter.java index c236d9c1042..d7a83154e34 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/Lucene40StoredFieldsWriter.java @@ -23,6 +23,7 @@ import org.apache.lucene.codecs.StoredFieldsWriter; import org.apache.lucene.document.Document; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.MergePolicy.MergeAbortedException; @@ -208,7 +209,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter { } @Override - public void finish(int numDocs) throws IOException { + public void finish(FieldInfos fis, int numDocs) throws IOException { if (4+((long) numDocs)*8 != indexStream.getFilePointer()) // This is most likely a bug in Sun JRE 1.6.0_04/_05; // we detect that the bug has struck, here, and @@ -244,7 +245,7 @@ public final class Lucene40StoredFieldsWriter extends StoredFieldsWriter { reader, matchingFieldsReader, rawDocLengths); } } - finish(docCount); + finish(mergeState.fieldInfos, docCount); return docCount; } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/values/DocValuesWriterBase.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/values/DocValuesWriterBase.java index aeeb23083e5..b8d2c0a7f6e 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/lucene40/values/DocValuesWriterBase.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene40/values/DocValuesWriterBase.java @@ -67,7 +67,8 @@ public abstract class DocValuesWriterBase extends PerDocConsumer { * docvalues of type {@link Type#BYTES_FIXED_SORTED} and {@link Type#BYTES_VAR_SORTED}. */ protected DocValuesWriterBase(PerDocWriteState state, boolean fasterButMoreRam) { - this.segmentName = state.segmentName; + // nocommit save away SegmentInfo instead? 
+ this.segmentName = state.segmentInfo.name; this.bytesUsed = state.bytesUsed; this.context = state.context; this.fasterButMoreRam = fasterButMoreRam; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java index 431b14af753..e9231a5991f 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/memory/MemoryPostingsFormat.java @@ -285,7 +285,7 @@ public class MemoryPostingsFormat extends PostingsFormat { @Override public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - final String fileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, EXTENSION); + final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); final IndexOutput out = state.directory.createOutput(fileName, state.context); return new FieldsConsumer() { diff --git a/lucene/core/src/java/org/apache/lucene/codecs/sep/SepPostingsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/sep/SepPostingsWriter.java index 4a20c10dd5d..4da0407fd55 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/sep/SepPostingsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/sep/SepPostingsWriter.java @@ -115,27 +115,27 @@ public final class SepPostingsWriter extends PostingsWriterBase { try { this.skipInterval = skipInterval; this.skipMinimum = skipInterval; /* set to the same for now */ - final String docFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, DOC_EXTENSION); + final String docFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, DOC_EXTENSION); docOut = factory.createOutput(state.directory, docFileName, state.context); docIndex = docOut.index(); if (state.fieldInfos.hasFreq()) { - final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, FREQ_EXTENSION); + final String frqFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, FREQ_EXTENSION); freqOut = factory.createOutput(state.directory, frqFileName, state.context); freqIndex = freqOut.index(); } if (state.fieldInfos.hasProx()) { - final String posFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, POS_EXTENSION); + final String posFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, POS_EXTENSION); posOut = factory.createOutput(state.directory, posFileName, state.context); posIndex = posOut.index(); // TODO: -- only if at least one field stores payloads? 
- final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, PAYLOAD_EXTENSION); + final String payloadFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, PAYLOAD_EXTENSION); payloadOut = state.directory.createOutput(payloadFileName, state.context); } - final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, SKIP_EXTENSION); + final String skipFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SKIP_EXTENSION); skipOut = state.directory.createOutput(skipFileName, state.context); totalNumDocs = state.numDocs; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsWriter.java index cab11954b47..df345319061 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextFieldsWriter.java @@ -46,7 +46,7 @@ class SimpleTextFieldsWriter extends FieldsConsumer { final static BytesRef PAYLOAD = new BytesRef(" payload "); public SimpleTextFieldsWriter(SegmentWriteState state) throws IOException { - final String fileName = SimpleTextPostingsFormat.getPostingsFileName(state.segmentName, state.segmentSuffix); + final String fileName = SimpleTextPostingsFormat.getPostingsFileName(state.segmentInfo.name, state.segmentSuffix); out = state.directory.createOutput(fileName, state.context); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPerDocConsumer.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPerDocConsumer.java index 7f31c9c2f0d..fe5b45fb067 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPerDocConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextPerDocConsumer.java @@ -51,7 +51,7 @@ class SimpleTextPerDocConsumer extends PerDocConsumer { @Override public DocValuesConsumer addValuesField(Type type, FieldInfo field) throws IOException { - return new SimpleTextDocValuesConsumer(SimpleTextDocValuesFormat.docValuesId(state.segmentName, + return new SimpleTextDocValuesConsumer(SimpleTextDocValuesFormat.docValuesId(state.segmentInfo.name, field.number), state.directory, state.context, type, segmentSuffix); } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsFormat.java index 7d6e11745e8..3a84caa6ae5 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsFormat.java @@ -42,7 +42,7 @@ public class SimpleTextStoredFieldsFormat extends StoredFieldsFormat { } @Override - public StoredFieldsWriter fieldsWriter(Directory directory, String segment, IOContext context) throws IOException { - return new SimpleTextStoredFieldsWriter(directory, segment, context); + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo si, IOContext context) throws IOException { + return new SimpleTextStoredFieldsWriter(directory, si.name, context); } } diff --git a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsWriter.java b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsWriter.java index 
a9efb2dbff3..717f612f649 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsWriter.java @@ -21,6 +21,7 @@ import java.io.IOException; import org.apache.lucene.codecs.StoredFieldsWriter; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexableField; import org.apache.lucene.store.Directory; @@ -163,7 +164,7 @@ public class SimpleTextStoredFieldsWriter extends StoredFieldsWriter { } @Override - public void finish(int numDocs) throws IOException { + public void finish(FieldInfos fis, int numDocs) throws IOException { if (numDocsWritten != numDocs) { throw new RuntimeException("mergeFields produced an invalid result: docCount is " + numDocs + " but only saw " + numDocsWritten + " file=" + out.toString() + "; now aborting this merge to prevent index corruption"); diff --git a/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java b/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java index 133692e3962..c786c3c258e 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocFieldProcessor.java @@ -92,7 +92,7 @@ final class DocFieldProcessor extends DocConsumer { // FreqProxTermsWriter does this with // FieldInfo.storePayload. FieldInfosWriter infosWriter = codec.fieldInfosFormat().getFieldInfosWriter(); - infosWriter.write(state.directory, state.segmentName, state.fieldInfos, IOContext.DEFAULT); + infosWriter.write(state.directory, state.segmentInfo.name, state.fieldInfos, IOContext.DEFAULT); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java index 3a24a727f50..8bb82c10b15 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriter.java @@ -494,6 +494,7 @@ final class DocumentsWriter { private void publishFlushedSegment(FlushedSegment newSegment, FrozenBufferedDeletes globalPacket) throws IOException { assert newSegment != null; + assert newSegment.segmentInfo != null; final SegmentInfoPerCommit segInfo = indexWriter.prepareFlushedSegment(newSegment); final BufferedDeletes deletes = newSegment.segmentDeletes; if (infoStream.isEnabled("DW")) { diff --git a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java index bda5dee686c..61c078cc3b6 100644 --- a/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java +++ b/lucene/core/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java @@ -172,7 +172,7 @@ class DocumentsWriterPerThread { SegmentWriteState flushState; //Deletes for our still-in-RAM (to be flushed next) segment BufferedDeletes pendingDeletes; - String segment; // Current segment we are working on + SegmentInfo segmentInfo; // Current segment we are working on boolean aborting = false; // True if an abort is pending boolean hasAborted = false; // True if the last exception throws by #updateDocument was aborting @@ -231,17 +231,11 @@ class DocumentsWriterPerThread { docState.doc = doc; docState.analyzer = analyzer; docState.docID = numDocsInRAM; - if (segment == null) { - // this call is synchronized on 
IndexWriter.segmentInfos - segment = writer.newSegmentName(); - assert numDocsInRAM == 0; - if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue); - } - + if (segmentInfo == null) { + initSegmentInfo(); } if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment); + infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segmentInfo.name); } boolean success = false; try { @@ -273,21 +267,28 @@ } finishDocument(delTerm); } + + private void initSegmentInfo() { + String segment = writer.newSegmentName(); + segmentInfo = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segment, 0, + -1, segment, false, null, false, + codec, + null, null); + assert numDocsInRAM == 0; + if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { + infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue); + } + } public int updateDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer analyzer, Term delTerm) throws IOException { assert writer.testPoint("DocumentsWriterPerThread addDocuments start"); assert deleteQueue != null; docState.analyzer = analyzer; - if (segment == null) { - // this call is synchronized on IndexWriter.segmentInfos - segment = writer.newSegmentName(); - assert numDocsInRAM == 0; - if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", Thread.currentThread().getName() + " init seg=" + segment + " delQueue=" + deleteQueue); - } + if (segmentInfo == null) { + initSegmentInfo(); } if (INFO_VERBOSE && infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segment); + infoStream.message("DWPT", Thread.currentThread().getName() + " update delTerm=" + delTerm + " docID=" + docState.docID + " seg=" + segmentInfo.name); } int docCount = 0; try { @@ -419,7 +420,7 @@ /** Reset after a flush */ private void doAfterFlush() throws IOException { - segment = null; + segmentInfo = null; consumer.doAfterFlush(); directory.getCreatedFiles().clear(); fieldInfos = new FieldInfos.Builder(fieldInfos.globalFieldNumbers); @@ -450,7 +451,7 @@ FlushedSegment flush() throws IOException { assert numDocsInRAM > 0; assert deleteSlice == null : "all deletes must be applied in prepareFlush"; - flushState = new SegmentWriteState(infoStream, directory, segment, fieldInfos.finish(), + flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.finish(), numDocsInRAM, writer.getConfig().getTermIndexInterval(), codec, pendingDeletes, new IOContext(new FlushInfo(numDocsInRAM, bytesUsed()))); final double startMBUsed = parent.flushControl.netBytes() / 1024.
/ 1024.; @@ -469,7 +470,7 @@ } if (infoStream.isEnabled("DWPT")) { - infoStream.message("DWPT", "flush postings as segment " + flushState.segmentName + " numDocs=" + numDocsInRAM); + infoStream.message("DWPT", "flush postings as segment " + flushState.segmentInfo.name + " numDocs=" + numDocsInRAM); } if (aborting) { @@ -484,11 +485,8 @@ try { consumer.flush(flushState); pendingDeletes.terms.clear(); - final SegmentInfo newSegment = new SegmentInfo(directoryOrig, Constants.LUCENE_MAIN_VERSION, segment, flushState.numDocs, - -1, segment, false, null, false, - flushState.codec, - null, null); - newSegment.setFiles(new HashSet<String>(directory.getCreatedFiles())); + segmentInfo.docCount = flushState.numDocs; + segmentInfo.setFiles(new HashSet<String>(directory.getCreatedFiles())); if (infoStream.isEnabled("DWPT")) { infoStream.message("DWPT", "new segment has " + (flushState.liveDocs == null ? 0 : (flushState.numDocs - flushState.delCountOnFlush)) + " deleted docs"); @@ -498,8 +496,8 @@ (flushState.fieldInfos.hasDocValues() ? "docValues" : "no docValues") + "; " + (flushState.fieldInfos.hasProx() ? "prox" : "no prox") + "; " + (flushState.fieldInfos.hasFreq() ? "freqs" : "no freqs")); - infoStream.message("DWPT", "flushedFiles=" + newSegment.files()); - infoStream.message("DWPT", "flushed codec=" + newSegment.getCodec()); + infoStream.message("DWPT", "flushedFiles=" + segmentInfo.files()); + infoStream.message("DWPT", "flushed codec=" + codec); } flushedDocCount += flushState.numDocs; @@ -514,22 +512,26 @@ } if (infoStream.isEnabled("DWPT")) { - final double newSegmentSize = newSegment.sizeInBytes()/1024./1024.; - infoStream.message("DWPT", "flushed: segment=" + newSegment + + final double newSegmentSize = segmentInfo.sizeInBytes()/1024./1024.; + infoStream.message("DWPT", "flushed: segment=" + segmentInfo.name + " ramUsed=" + nf.format(startMBUsed) + " MB" + " newFlushedSize(includes docstores)=" + nf.format(newSegmentSize) + " MB" + " docs/MB=" + nf.format(flushedDocCount / newSegmentSize)); } + + assert segmentInfo != null; + + FlushedSegment fs = new FlushedSegment(new SegmentInfoPerCommit(segmentInfo, 0, -1L), flushState.fieldInfos, + segmentDeletes, flushState.liveDocs, flushState.delCountOnFlush); doAfterFlush(); success = true; - return new FlushedSegment(new SegmentInfoPerCommit(newSegment, 0, -1L), flushState.fieldInfos, - segmentDeletes, flushState.liveDocs, flushState.delCountOnFlush); + return fs; } finally { if (!success) { - if (segment != null) { + if (segmentInfo != null) { synchronized(parent.indexWriter) { - parent.indexWriter.deleter.refresh(segment); + parent.indexWriter.deleter.refresh(segmentInfo.name); } } abort(); @@ -537,9 +539,9 @@ } } - /** Get current segment name we are writing. */ - String getSegment() { - return segment; + /** Get current segment info we are writing. 
*/ + SegmentInfo getSegmentInfo() { + return segmentInfo; } long bytesUsed() { @@ -572,14 +574,14 @@ } PerDocWriteState newPerDocWriteState(String segmentSuffix) { - assert segment != null; - return new PerDocWriteState(infoStream, directory, segment, bytesUsed, segmentSuffix, IOContext.DEFAULT); + assert segmentInfo != null; + return new PerDocWriteState(infoStream, directory, segmentInfo, bytesUsed, segmentSuffix, IOContext.DEFAULT); } @Override public String toString() { return "DocumentsWriterPerThread [pendingDeletes=" + pendingDeletes - + ", segment=" + segment + ", aborting=" + aborting + ", numDocsInRAM=" + + ", segment=" + (segmentInfo != null ? segmentInfo.name : "null") + ", aborting=" + aborting + ", numDocsInRAM=" + numDocsInRAM + ", deleteQueue=" + deleteQueue + "]"; } } diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index 1d7bd8565cf..3e097d8be6b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -2288,7 +2288,12 @@ public class IndexWriter implements Closeable, TwoPhaseCommit { // TODO: somehow we should fix this merge so it's // abortable so that IW.close(false) is able to stop it TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(directory); - SegmentMerger merger = new SegmentMerger(infoStream, trackingDir, config.getTermIndexInterval(), + + SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, 0, + -1, mergedName, false, null, false, + codec, null, null); + + SegmentMerger merger = new SegmentMerger(info, infoStream, trackingDir, config.getTermIndexInterval(), mergedName, MergeState.CheckAbort.NONE, payloadProcessorProvider, new FieldInfos.Builder(globalFieldNumberMap), codec, context); @@ -2297,9 +2302,8 @@ } MergeState mergeState = merger.merge(); // merge 'em - SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, mergeState.mergedDocCount, - -1, mergedName, false, null, false, - codec, null, null); + info.docCount = mergeState.mergedDocCount; + SegmentInfoPerCommit infoPerCommit = new SegmentInfoPerCommit(info, 0, -1L); info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles())); @@ -3433,7 +3437,8 @@ final MergeState.CheckAbort checkAbort = new MergeState.CheckAbort(merge, directory); final TrackingDirectoryWrapper dirWrapper = new TrackingDirectoryWrapper(directory); - SegmentMerger merger = new SegmentMerger(infoStream, dirWrapper, config.getTermIndexInterval(), mergedName, checkAbort, + + SegmentMerger merger = new SegmentMerger(merge.info.info, infoStream, dirWrapper, config.getTermIndexInterval(), mergedName, checkAbort, payloadProcessorProvider, new FieldInfos.Builder(globalFieldNumberMap), codec, context); if (infoStream.isEnabled("IW")) { diff --git a/lucene/core/src/java/org/apache/lucene/index/MergeState.java b/lucene/core/src/java/org/apache/lucene/index/MergeState.java index b42661b397b..b1d1cdb7487 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MergeState.java +++ b/lucene/core/src/java/org/apache/lucene/index/MergeState.java @@ -40,6 +40,7 @@ public class MergeState { } } + public SegmentInfo segmentInfo; public FieldInfos fieldInfos; public List readers; // Readers & liveDocs being merged public int[][] docMaps; // 
Maps docIDs around deletions diff --git a/lucene/core/src/java/org/apache/lucene/index/PerDocWriteState.java b/lucene/core/src/java/org/apache/lucene/index/PerDocWriteState.java index 64e0b955ee5..87ebdf419d8 100644 --- a/lucene/core/src/java/org/apache/lucene/index/PerDocWriteState.java +++ b/lucene/core/src/java/org/apache/lucene/index/PerDocWriteState.java @@ -32,17 +32,17 @@ import org.apache.lucene.util.InfoStream; public class PerDocWriteState { public final InfoStream infoStream; public final Directory directory; - public final String segmentName; + public final SegmentInfo segmentInfo; public final Counter bytesUsed; public final String segmentSuffix; public final IOContext context; public PerDocWriteState(InfoStream infoStream, Directory directory, - String segmentName, Counter bytesUsed, + SegmentInfo segmentInfo, Counter bytesUsed, String segmentSuffix, IOContext context) { this.infoStream = infoStream; this.directory = directory; - this.segmentName = segmentName; + this.segmentInfo = segmentInfo; this.segmentSuffix = segmentSuffix; this.bytesUsed = bytesUsed; this.context = context; @@ -51,7 +51,7 @@ public class PerDocWriteState { public PerDocWriteState(SegmentWriteState state) { infoStream = state.infoStream; directory = state.directory; - segmentName = state.segmentName; + segmentInfo = state.segmentInfo; segmentSuffix = state.segmentSuffix; bytesUsed = Counter.newCounter(); context = state.context; @@ -60,7 +60,7 @@ public class PerDocWriteState { public PerDocWriteState(PerDocWriteState state, String segmentSuffix) { this.infoStream = state.infoStream; this.directory = state.directory; - this.segmentName = state.segmentName; + this.segmentInfo = state.segmentInfo; this.segmentSuffix = segmentSuffix; this.bytesUsed = state.bytesUsed; this.context = state.context; diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java index d336a2518a9..46849d5fe50 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java @@ -55,8 +55,13 @@ final class SegmentMerger { private final MergeState mergeState = new MergeState(); private final FieldInfos.Builder fieldInfosBuilder; - - SegmentMerger(InfoStream infoStream, Directory dir, int termIndexInterval, String name, MergeState.CheckAbort checkAbort, PayloadProcessorProvider payloadProcessorProvider, FieldInfos.Builder fieldInfosBuilder, Codec codec, IOContext context) { + + // nocommit nuke name since SI has it.... but Directory is + // NOT the same!! 
+ SegmentMerger(SegmentInfo segmentInfo, InfoStream infoStream, Directory dir, int termIndexInterval, String name, + MergeState.CheckAbort checkAbort, PayloadProcessorProvider payloadProcessorProvider, + FieldInfos.Builder fieldInfosBuilder, Codec codec, IOContext context) { + mergeState.segmentInfo = segmentInfo; mergeState.infoStream = infoStream; mergeState.readers = new ArrayList(); mergeState.checkAbort = checkAbort; @@ -107,12 +112,14 @@ final class SegmentMerger { mergeState.mergedDocCount = setDocMaps(); - mergeFieldInfos(); + mergeDocValuesAndNormsFieldInfos(); setMatchingSegmentReaders(); int numMerged = mergeFields(); assert numMerged == mergeState.mergedDocCount; - final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, segment, mergeState.fieldInfos, mergeState.mergedDocCount, termIndexInterval, codec, null, context); + final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, mergeState.segmentInfo, + mergeState.fieldInfos, mergeState.mergedDocCount, + termIndexInterval, codec, null, context); mergeTerms(segmentWriteState); mergePerDoc(segmentWriteState); @@ -192,10 +199,6 @@ final class SegmentMerger { } } - private void mergeFieldInfos() throws IOException { - mergeDocValuesAndNormsFieldInfos(); - } - // NOTE: this is actually merging all the fieldinfos public void mergeDocValuesAndNormsFieldInfos() throws IOException { // mapping from all docvalues fields found to their promoted types @@ -261,7 +264,7 @@ final class SegmentMerger { * @throws IOException if there is a low-level IO error */ private int mergeFields() throws CorruptIndexException, IOException { - final StoredFieldsWriter fieldsWriter = codec.storedFieldsFormat().fieldsWriter(directory, segment, context); + final StoredFieldsWriter fieldsWriter = codec.storedFieldsFormat().fieldsWriter(directory, mergeState.segmentInfo, context); try { return fieldsWriter.merge(mergeState); diff --git a/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java b/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java index 9a0eb85137e..16e531c6d27 100644 --- a/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java +++ b/lucene/core/src/java/org/apache/lucene/index/SegmentWriteState.java @@ -30,7 +30,7 @@ import org.apache.lucene.util.MutableBits; public class SegmentWriteState { public final InfoStream infoStream; public final Directory directory; - public final String segmentName; + public final SegmentInfo segmentInfo; public final FieldInfos fieldInfos; public final int numDocs; public int delCountOnFlush; @@ -57,12 +57,14 @@ public class SegmentWriteState { public final IOContext context; - public SegmentWriteState(InfoStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos, + public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos, int numDocs, int termIndexInterval, Codec codec, BufferedDeletes segDeletes, IOContext context) { this.infoStream = infoStream; this.segDeletes = segDeletes; this.directory = directory; - this.segmentName = segmentName; + // nocommit a lot of this is redundant w/ SI! BUT not + // the Directory!!!! one is tracking one is not!!! 
+ this.segmentInfo = segmentInfo; this.fieldInfos = fieldInfos; this.numDocs = numDocs; this.termIndexInterval = termIndexInterval; @@ -77,7 +79,7 @@ public class SegmentWriteState { public SegmentWriteState(SegmentWriteState state, String segmentSuffix) { infoStream = state.infoStream; directory = state.directory; - segmentName = state.segmentName; + segmentInfo = state.segmentInfo; fieldInfos = state.fieldInfos; numDocs = state.numDocs; termIndexInterval = state.termIndexInterval; diff --git a/lucene/core/src/java/org/apache/lucene/index/StoredFieldsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/StoredFieldsConsumer.java index 22cc289f15d..be64d2a9a36 100644 --- a/lucene/core/src/java/org/apache/lucene/index/StoredFieldsConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/index/StoredFieldsConsumer.java @@ -69,7 +69,7 @@ final class StoredFieldsConsumer { if (fieldsWriter != null) { try { - fieldsWriter.finish(state.numDocs); + fieldsWriter.finish(state.fieldInfos, state.numDocs); } finally { fieldsWriter.close(); fieldsWriter = null; @@ -80,7 +80,7 @@ final class StoredFieldsConsumer { private synchronized void initFieldsWriter(IOContext context) throws IOException { if (fieldsWriter == null) { - fieldsWriter = codec.storedFieldsFormat().fieldsWriter(docWriter.directory, docWriter.getSegment(), context); + fieldsWriter = codec.storedFieldsFormat().fieldsWriter(docWriter.directory, docWriter.getSegmentInfo(), context); lastDocID = 0; } } diff --git a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java index 4e525a3c5d1..ed20115ada0 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java +++ b/lucene/core/src/java/org/apache/lucene/index/TermVectorsConsumer.java @@ -54,7 +54,7 @@ final class TermVectorsConsumer extends TermsHashConsumer { // At least one doc in this run had term vectors enabled try { fill(state.numDocs); - assert state.segmentName != null; + assert state.segmentInfo != null; writer.finish(state.numDocs); } finally { IOUtils.close(writer); @@ -84,7 +84,7 @@ final class TermVectorsConsumer extends TermsHashConsumer { private final void initTermVectorsWriter() throws IOException { if (writer == null) { IOContext context = new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed())); - writer = docWriter.codec.termVectorsFormat().vectorsWriter(docWriter.directory, docWriter.getSegment(), context); + writer = docWriter.codec.termVectorsFormat().vectorsWriter(docWriter.directory, docWriter.getSegmentInfo().name, context); lastDocID = 0; } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java index 8544f27e5b1..8fd14a5bbd9 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java @@ -619,7 +619,8 @@ public class TestCodecs extends LuceneTestCase { final int termIndexInterval = _TestUtil.nextInt(random(), 13, 27); final Codec codec = Codec.getDefault(); - final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codec, null, newIOContext(random())); + final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, -1, SEGMENT, false, null, false, codec, null, null); + final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, 
fieldInfos, 10000, termIndexInterval, codec, null, newIOContext(random())); final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state); Arrays.sort(fields); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java index 89158a38640..a470a5c9359 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDoc.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDoc.java @@ -197,7 +197,10 @@ public class TestDoc extends LuceneTestCase { final Codec codec = Codec.getDefault(); TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir); - SegmentMerger merger = new SegmentMerger(InfoStream.getDefault(), trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, merged, MergeState.CheckAbort.NONE, null, new FieldInfos.Builder(), codec, context); + final SegmentInfo si = new SegmentInfo(si1.info.dir, Constants.LUCENE_MAIN_VERSION, merged, 10000, -1, merged, false, null, false, codec, null, null); + + SegmentMerger merger = new SegmentMerger(si, InfoStream.getDefault(), trackingDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, + merged, MergeState.CheckAbort.NONE, null, new FieldInfos.Builder(), codec, context); merger.add(r1); merger.add(r2); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java index 36a5d6732f0..2a00b422276 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java @@ -77,7 +77,10 @@ public class TestSegmentMerger extends LuceneTestCase { public void testMerge() throws IOException { final Codec codec = Codec.getDefault(); - SegmentMerger merger = new SegmentMerger(InfoStream.getDefault(), mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, mergedSegment, MergeState.CheckAbort.NONE, null, new FieldInfos.Builder(), codec, newIOContext(random())); + final SegmentInfo si = new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, 10000, -1, mergedSegment, false, null, false, codec, null, null); + + SegmentMerger merger = new SegmentMerger(si, InfoStream.getDefault(), mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, + mergedSegment, MergeState.CheckAbort.NONE, null, new FieldInfos.Builder(), codec, newIOContext(random())); merger.add(reader1); merger.add(reader2); MergeState mergeState = merger.merge(); diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWFieldsWriter.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWFieldsWriter.java index cd4383cd873..9718b7ec101 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWFieldsWriter.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWFieldsWriter.java @@ -43,13 +43,13 @@ class PreFlexRWFieldsWriter extends FieldsConsumer { public PreFlexRWFieldsWriter(SegmentWriteState state) throws IOException { termsOut = new TermInfosWriter(state.directory, - state.segmentName, + state.segmentInfo.name, state.fieldInfos, state.termIndexInterval); boolean success = false; try { - final String freqFile = IndexFileNames.segmentFileName(state.segmentName, "", Lucene3xPostingsFormat.FREQ_EXTENSION); + final String freqFile = IndexFileNames.segmentFileName(state.segmentInfo.name, "", Lucene3xPostingsFormat.FREQ_EXTENSION); freqOut = state.directory.createOutput(freqFile, 
state.context); totalNumDocs = state.numDocs; success = true; @@ -62,7 +62,7 @@ class PreFlexRWFieldsWriter extends FieldsConsumer { success = false; try { if (state.fieldInfos.hasProx()) { - final String proxFile = IndexFileNames.segmentFileName(state.segmentName, "", Lucene3xPostingsFormat.PROX_EXTENSION); + final String proxFile = IndexFileNames.segmentFileName(state.segmentInfo.name, "", Lucene3xPostingsFormat.PROX_EXTENSION); proxOut = state.directory.createOutput(proxFile, state.context); } else { proxOut = null; diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWNormsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWNormsFormat.java index f5496802107..21e7d406c3f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWNormsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWNormsFormat.java @@ -28,7 +28,6 @@ class PreFlexRWNormsFormat extends Lucene3xNormsFormat { @Override public PerDocConsumer docsConsumer(PerDocWriteState state) throws IOException { - return new PreFlexRWNormsConsumer(state.directory, state.segmentName, state.context); + return new PreFlexRWNormsConsumer(state.directory, state.segmentInfo.name, state.context); } - } diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsFormat.java index ef3a2296d8f..866529308f8 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsFormat.java @@ -20,14 +20,15 @@ package org.apache.lucene.codecs.lucene3x; import java.io.IOException; import org.apache.lucene.codecs.StoredFieldsWriter; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; class PreFlexRWStoredFieldsFormat extends Lucene3xStoredFieldsFormat { @Override - public StoredFieldsWriter fieldsWriter(Directory directory, String segment, IOContext context) throws IOException { - return new PreFlexRWStoredFieldsWriter(directory, segment, context); + public StoredFieldsWriter fieldsWriter(Directory directory, SegmentInfo segmentInfo, IOContext context) throws IOException { + return new PreFlexRWStoredFieldsWriter(directory, segmentInfo.name, context); } - } diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsWriter.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsWriter.java index a580b124716..d14eb01ac9a 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsWriter.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/lucene3x/PreFlexRWStoredFieldsWriter.java @@ -20,6 +20,7 @@ import java.io.IOException; import org.apache.lucene.codecs.StoredFieldsWriter; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexableField; import org.apache.lucene.store.Directory; @@ -143,7 +144,7 @@ final class PreFlexRWStoredFieldsWriter extends StoredFieldsWriter { } @Override - public void finish(int numDocs) throws IOException { + public void finish(FieldInfos fis, int numDocs) throws 
IOException { if (4+((long) numDocs)*8 != indexStream.getFilePointer()) // This is most likely a bug in Sun JRE 1.6.0_04/_05; // we detect that the bug has struck, here, and diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java index 121af3c6037..9a32448a86d 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/mockrandom/MockRandomPostingsFormat.java @@ -138,10 +138,10 @@ public class MockRandomPostingsFormat extends PostingsFormat { final long seed = seedRandom.nextLong(); if (LuceneTestCase.VERBOSE) { - System.out.println("MockRandomCodec: writing to seg=" + state.segmentName + " formatID=" + state.segmentSuffix + " seed=" + seed); + System.out.println("MockRandomCodec: writing to seg=" + state.segmentInfo.name + " formatID=" + state.segmentSuffix + " seed=" + seed); } - final String seedFileName = IndexFileNames.segmentFileName(state.segmentName, state.segmentSuffix, SEED_EXT); + final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT); final IndexOutput out = state.directory.createOutput(seedFileName, state.context); try { out.writeLong(seed); diff --git a/lucene/test-framework/src/java/org/apache/lucene/codecs/ramonly/RAMOnlyPostingsFormat.java b/lucene/test-framework/src/java/org/apache/lucene/codecs/ramonly/RAMOnlyPostingsFormat.java index 332b9e99629..e406afe6ff6 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/codecs/ramonly/RAMOnlyPostingsFormat.java +++ b/lucene/test-framework/src/java/org/apache/lucene/codecs/ramonly/RAMOnlyPostingsFormat.java @@ -540,7 +540,7 @@ public class RAMOnlyPostingsFormat extends PostingsFormat { // TODO -- ok to do this up front instead of // on close....? should be ok? // Write our ID: - final String idFileName = IndexFileNames.segmentFileName(writeState.segmentName, writeState.segmentSuffix, ID_EXTENSION); + final String idFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name, writeState.segmentSuffix, ID_EXTENSION); IndexOutput out = writeState.directory.createOutput(idFileName, writeState.context); boolean success = false; try {