mirror of https://github.com/apache/lucene.git
LUCENE-4055: remove redundant param
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1341974 13f79535-47bb-0310-9956-ffa450edef68
commit b0d40e65fc
parent 97b082b3d8
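
The change below removes the redundant Codec argument from SegmentWriteState: the SegmentInfo handed to the constructor already knows its codec, so carrying a second reference just lets the two drift apart. A minimal self-contained sketch of the pattern, using simplified stand-ins rather than Lucene's real classes:

    // A self-contained sketch of the refactoring (illustrative stand-ins, not
    // Lucene's real classes): a constructor parameter that duplicates state
    // reachable through another parameter is dropped, and callers derive the
    // value instead.
    final class Codec {
      final String name;
      Codec(String name) { this.name = name; }
    }

    final class SegmentInfo {
      private final Codec codec;
      SegmentInfo(Codec codec) { this.codec = codec; }
      Codec getCodec() { return codec; } // the single authoritative source
    }

    final class WriteState {
      final SegmentInfo segmentInfo;
      // Before: WriteState(SegmentInfo si, Codec codec) could be handed a codec
      // that disagreed with si.getCodec(). Now only one path exists.
      WriteState(SegmentInfo segmentInfo) { this.segmentInfo = segmentInfo; }
    }

    public class RedundantParamDemo {
      public static void main(String[] args) {
        SegmentInfo si = new SegmentInfo(new Codec("Lucene40"));
        WriteState state = new WriteState(si);
        // Consumers follow the one remaining path to the codec:
        System.out.println(state.segmentInfo.getCodec().name); // prints Lucene40
      }
    }

After this commit, state.segmentInfo.getCodec() is the only way to reach the codec from a SegmentWriteState.
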
@@ -414,10 +414,6 @@ class DocumentsWriterPerThread {
     return numDocsInRAM;
   }
 
-  Codec getCodec() {
-    return flushState.codec;
-  }
-
   /** Reset after a flush */
   private void doAfterFlush() throws IOException {
     segmentInfo = null;
@@ -453,7 +449,7 @@ class DocumentsWriterPerThread {
     assert deleteSlice == null : "all deletes must be applied in prepareFlush";
     flushState = new SegmentWriteState(infoStream, directory, segmentInfo, fieldInfos.finish(),
         numDocsInRAM, writer.getConfig().getTermIndexInterval(),
-        codec, pendingDeletes, new IOContext(new FlushInfo(numDocsInRAM, bytesUsed())));
+        pendingDeletes, new IOContext(new FlushInfo(numDocsInRAM, bytesUsed())));
     final double startMBUsed = parent.flushControl.netBytes() / 1024. / 1024.;
 
     // Apply delete-by-docID now (delete-byDocID only
@@ -57,7 +57,7 @@ final class FreqProxTermsWriter extends TermsHashConsumer {
     // Sort by field name
     CollectionUtil.quickSort(allFields);
 
-    final FieldsConsumer consumer = state.codec.postingsFormat().fieldsConsumer(state);
+    final FieldsConsumer consumer = state.segmentInfo.getCodec().postingsFormat().fieldsConsumer(state);
 
     boolean success = false;
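
This FreqProxTermsWriter hunk is the consumer-side counterpart: code that used to read the duplicated state.codec field now takes one extra hop through state.segmentInfo.getCodec(), leaving a single source of truth for the codec.
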
@@ -116,7 +116,7 @@ final class SegmentMerger {
 
     final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, mergeState.segmentInfo,
                                                                       mergeState.fieldInfos, mergeState.mergedDocCount,
-                                                                      termIndexInterval, codec, null, context);
+                                                                      termIndexInterval, null, context);
     mergeTerms(segmentWriteState);
     mergePerDoc(segmentWriteState);
 
@@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.InfoStream;
@@ -45,7 +44,6 @@ public class SegmentWriteState {
   // Lazily created:
   public MutableBits liveDocs;
 
-  public final Codec codec;
   public final String segmentSuffix;
 
   /** Expert: The fraction of terms in the "dictionary" which should be stored
@@ -58,7 +56,7 @@ public class SegmentWriteState {
   public final IOContext context;
 
   public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos,
-      int numDocs, int termIndexInterval, Codec codec, BufferedDeletes segDeletes, IOContext context) {
+      int numDocs, int termIndexInterval, BufferedDeletes segDeletes, IOContext context) {
     this.infoStream = infoStream;
     this.segDeletes = segDeletes;
     this.directory = directory;
@@ -68,7 +66,6 @@ public class SegmentWriteState {
     this.fieldInfos = fieldInfos;
     this.numDocs = numDocs;
     this.termIndexInterval = termIndexInterval;
-    this.codec = codec;
     segmentSuffix = "";
     this.context = context;
   }
@@ -84,7 +81,6 @@ public class SegmentWriteState {
     numDocs = state.numDocs;
     termIndexInterval = state.termIndexInterval;
     context = state.context;
-    codec = state.codec;
     this.segmentSuffix = segmentSuffix;
     segDeletes = state.segDeletes;
     delCountOnFlush = state.delCountOnFlush;
 
|
|
@ -620,7 +620,7 @@ public class TestCodecs extends LuceneTestCase {
|
|||
final int termIndexInterval = _TestUtil.nextInt(random(), 13, 27);
|
||||
final Codec codec = Codec.getDefault();
|
||||
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, -1, SEGMENT, false, null, false, codec, null, null);
|
||||
final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, 10000, termIndexInterval, codec, null, newIOContext(random()));
|
||||
final SegmentWriteState state = new SegmentWriteState(InfoStream.getDefault(), dir, si, fieldInfos, 10000, termIndexInterval, null, newIOContext(random()));
|
||||
|
||||
final FieldsConsumer consumer = codec.postingsFormat().fieldsConsumer(state);
|
||||
Arrays.sort(fields);
|
||||
|
|
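
Note that TestCodecs still obtains Codec.getDefault(): the codec itself is not going away, only the duplicated constructor argument. Since the same codec object is passed into the SegmentInfo, the test's local codec and state.segmentInfo.getCodec() refer to the same instance by construction.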