LUCENE-4055: remove redundant mergedDocCount

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1342009 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-05-23 20:06:55 +00:00
parent 7508958f63
commit 1334bd6c15
7 changed files with 8 additions and 11 deletions

View File

@@ -103,7 +103,7 @@ public abstract class DocValuesConsumer {
}
// only finish if no exception is thrown!
if (hasMerged) {
-      finish(mergeState.mergedDocCount);
+      finish(mergeState.segmentInfo.getDocCount());
}
}

View File

@@ -79,7 +79,7 @@ public abstract class TermsConsumer {
long sumTotalTermFreq = 0;
long sumDocFreq = 0;
long sumDFsinceLastAbortCheck = 0;
-    FixedBitSet visitedDocs = new FixedBitSet(mergeState.mergedDocCount);
+    FixedBitSet visitedDocs = new FixedBitSet(mergeState.segmentInfo.getDocCount());
IndexOptions indexOptions = mergeState.fieldInfo.getIndexOptions();
if (indexOptions == IndexOptions.DOCS_ONLY) {

View File

@@ -68,7 +68,7 @@ class FixedSortedBytesImpl {
throws IOException {
boolean success = false;
try {
-        final MergeContext ctx = SortedBytesMergeUtils.init(Type.BYTES_FIXED_SORTED, docValues, comp, mergeState.mergedDocCount);
+        final MergeContext ctx = SortedBytesMergeUtils.init(Type.BYTES_FIXED_SORTED, docValues, comp, mergeState.segmentInfo.getDocCount());
List<SortedSourceSlice> slices = SortedBytesMergeUtils.buildSlices(mergeState.docBase, mergeState.docMaps, docValues, ctx);
final IndexOutput datOut = getOrCreateDataOut();
datOut.writeInt(ctx.sizePerValues);

View File

@@ -71,7 +71,7 @@ final class VarSortedBytesImpl {
throws IOException {
boolean success = false;
try {
-        MergeContext ctx = SortedBytesMergeUtils.init(Type.BYTES_VAR_SORTED, docValues, comp, mergeState.mergedDocCount);
+        MergeContext ctx = SortedBytesMergeUtils.init(Type.BYTES_VAR_SORTED, docValues, comp, mergeState.segmentInfo.getDocCount());
final List<SortedSourceSlice> slices = SortedBytesMergeUtils.buildSlices(mergeState.docBase, mergeState.docMaps, docValues, ctx);
IndexOutput datOut = getOrCreateDataOut();

View File

@@ -45,7 +45,6 @@ public class MergeState {
public List<IndexReaderAndLiveDocs> readers; // Readers & liveDocs being merged
public int[][] docMaps; // Maps docIDs around deletions
public int[] docBase; // New docID base per reader
-  public int mergedDocCount;                      // Total # merged docs
public CheckAbort checkAbort;
public InfoStream infoStream;

View File

@@ -107,13 +107,11 @@ final class SegmentMerger {
// IndexWriter.close(false) takes to actually stop the
// threads.
-    // nocommit: can we nuke this count too?
-    mergeState.mergedDocCount = setDocMaps();
-    mergeState.segmentInfo.setDocCount(mergeState.mergedDocCount);
+    mergeState.segmentInfo.setDocCount(setDocMaps());
mergeDocValuesAndNormsFieldInfos();
setMatchingSegmentReaders();
int numMerged = mergeFields();
-    assert numMerged == mergeState.mergedDocCount;
+    assert numMerged == mergeState.segmentInfo.getDocCount();
final SegmentWriteState segmentWriteState = new SegmentWriteState(mergeState.infoStream, directory, mergeState.segmentInfo,
mergeState.fieldInfos, termIndexInterval, null, context);
@@ -126,7 +124,7 @@ final class SegmentMerger {
if (mergeState.fieldInfos.hasVectors()) {
numMerged = mergeVectors();
-      assert numMerged == mergeState.mergedDocCount;
+      assert numMerged == mergeState.segmentInfo.getDocCount();
}
// write the merged infos

View File

@@ -84,7 +84,7 @@ public class TestSegmentMerger extends LuceneTestCase {
merger.add(reader1);
merger.add(reader2);
MergeState mergeState = merger.merge();
-    int docsMerged = mergeState.mergedDocCount;
+    int docsMerged = mergeState.segmentInfo.getDocCount();
assertTrue(docsMerged == 2);
//Should be able to open a new SegmentReader against the new directory
SegmentReader mergedReader = new SegmentReader(new SegmentInfoPerCommit(