LUCENE-4055: more cleanups

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1341139 13f79535-47bb-0310-9956-ffa450edef68
commit 9aa6f6b97e
parent f4e1fa1387
Author: Michael McCandless
Date:   2012-05-21 18:38:11 +00:00

4 changed files with 11 additions and 38 deletions

org/apache/lucene/codecs/lucene3x/Lucene3xSegmentInfosReader.java

@@ -18,7 +18,6 @@ package org.apache.lucene.codecs.lucene3x;
  */
 
 import java.io.IOException;
-import java.util.Arrays; // nocommit
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -186,12 +185,9 @@ public class Lucene3xSegmentInfosReader extends SegmentInfosReader {
 
     final Map<String,String> diagnostics = input.readStringStringMap();
 
-    // nocommit unused...
-    final int hasVectors;
     if (format <= Lucene3xSegmentInfosFormat.FORMAT_HAS_VECTORS) {
-      hasVectors = input.readByte();
-    } else {
-      hasVectors = -1;
+      // NOTE: unused
+      final int hasVectors = input.readByte();
     }
 
     final Set<String> files;
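
Note: the shape of this change is the standard pattern for version-gated index formats. A field written only by certain format versions must still be consumed from the stream so the reader stays positioned for the fields that follow, even when its value is no longer used. A minimal sketch of the pattern, with a hypothetical constant standing in for Lucene3xSegmentInfosFormat.FORMAT_HAS_VECTORS:

    import java.io.DataInput;
    import java.io.IOException;

    final class LegacyFieldSkip {
      // Hypothetical stand-in; the real constant lives in Lucene3xSegmentInfosFormat.
      // In the pre-4.0 scheme, format versions are negative and decrease as they
      // get newer, hence the <= comparison.
      static final int FORMAT_HAS_VECTORS = -10;

      static void readLegacyHasVectors(DataInput in, int format) throws IOException {
        if (format <= FORMAT_HAS_VECTORS) {
          in.readByte(); // value unused, but reading it keeps later fields aligned
        }
      }
    }
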
@@ -240,19 +236,11 @@ public class Lucene3xSegmentInfosReader extends SegmentInfosReader {
         } else {
           // nocommit -- i thought _X_N.sY files were pre-3.0...????
           assert false;
-          /*
-          System.out.println("FILES: " + Arrays.toString(dir.listAll()) + "; seg=" + segmentName);
-          addIfExists(dir, files, IndexFileNames.fileNameFromGeneration(segmentName, "s" + ent.getKey(), gen));
-          assert false: "gen=" + gen;
-          */
         }
       }
     }
   }
 
-    // nocommit we can use hasProx/hasVectors from the 3.x
-    // si... if we can pass this to the other components...?
     SegmentInfo info = new SegmentInfo(dir, version, segmentName, docCount, docStoreOffset,
                                        docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile,
                                        delCount, null, diagnostics);
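
Note: deleting this commented-out block is also what lets the first hunk drop the java.util.Arrays import, which was only used by the println. For context, addIfExists is a small helper that records a file in the segment's file set only when it actually exists in the directory, which matters for optional per-generation files like the separate norms (_X_N.sY) the nocommit asks about. A plausible shape for it, assuming the 3.x-era Directory.fileExists API (the helper itself is not shown in this diff):

    // Assumed shape of the helper referenced in the deleted code.
    private static void addIfExists(Directory dir, Set<String> files, String fileName) throws IOException {
      if (dir.fileExists(fileName)) {
        files.add(fileName);
      }
    }
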

org/apache/lucene/index/IndexFileDeleter.java

@@ -103,8 +103,7 @@ final class IndexFileDeleter {
 
   /** Change to true to see details of reference counts when
    * infoStream is enabled */
-  // nocommit back to false:
-  public static boolean VERBOSE_REF_COUNTS = true;
+  public static boolean VERBOSE_REF_COUNTS = false;
 
   // Used only for assert
   private final IndexWriter writer;
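
Note: VERBOSE_REF_COUNTS is a debug-only switch, which is why the nocommit insisted it go back to false before commit: when it is true and an infoStream is set, IndexFileDeleter logs every file reference-count change, far too chatty for normal use. Roughly how such a flag gates the extra output (a sketch with stand-in types; the message text is invented, not Lucene's exact log lines):

    final class RefCountLogging {
      static boolean VERBOSE_REF_COUNTS = false;

      // Both the flag and a live output stream must be enabled for the message to print.
      static void logIncRef(java.io.PrintStream infoStream, String fileName, int refCount) {
        if (VERBOSE_REF_COUNTS && infoStream != null) {
          infoStream.println("IFD: incRef \"" + fileName + "\": count is now " + refCount);
        }
      }
    }
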

org/apache/lucene/index/IndexWriter.java

@@ -1501,15 +1501,14 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       }
 
       mergeMaxNumSegments = maxNumSegments;
 
-      // Now mark all pending & running merges as isMaxNumSegments:
+      // Now mark all pending & running merges for forced
+      // merge:
       for(final MergePolicy.OneMerge merge : pendingMerges) {
         merge.maxNumSegments = maxNumSegments;
         segmentsToMerge.put(merge.info, Boolean.TRUE);
       }
 
-      for ( final MergePolicy.OneMerge merge: runningMerges ) {
+      for (final MergePolicy.OneMerge merge: runningMerges) {
         merge.maxNumSegments = maxNumSegments;
         segmentsToMerge.put(merge.info, Boolean.TRUE);
       }
     }
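
Note: two loops are needed because a forced merge must adopt work queued before it started. pendingMerges holds merges selected but not yet running, runningMerges holds merges already executing, and both kinds must pick up maxNumSegments and have their segments recorded in segmentsToMerge so the forced merge can tell when it is done. From the caller's side all of this sits behind one call; a typical use with the IndexWriter API of this era (dir and analyzer set up elsewhere):

    // Force the index down to one segment; this triggers the bookkeeping above.
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_40, analyzer));
    try {
      writer.forceMerge(1);
    } finally {
      writer.close();
    }
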
@@ -2040,7 +2039,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       // above:
       codec.segmentInfosFormat().getSegmentInfosWriter().write(directory, newSegment, flushedSegment.fieldInfos, context);
 
-      // nocommit ideally we would freeze merge.info here!!
+      // nocommit ideally we would freeze newSegment here!!
       // because any changes after writing the .si will be
       // lost...
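
Note: the nocommit points at a real hazard. Once the .si file is written, the in-memory SegmentInfo can still be mutated, and any such change silently disagrees with what is on disk. A common guard is a freeze flag checked by every mutator; a hypothetical sketch, not the Lucene implementation:

    // Hypothetical freeze guard for write-once metadata.
    final class FreezableInfo {
      private boolean frozen;
      private java.util.Map<String,String> diagnostics;

      void freeze() { frozen = true; } // call right after writing the .si file

      void setDiagnostics(java.util.Map<String,String> d) {
        if (frozen) {
          throw new IllegalStateException("already written to disk; changes would be lost");
        }
        diagnostics = d;
      }
    }
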
@@ -2291,8 +2290,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       }
 
       MergeState mergeState = merger.merge(); // merge 'em
-      int docCount = mergeState.mergedDocCount;
-      SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, docCount,
+      SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, mergeState.mergedDocCount,
                                          -1, mergedName, false, null, false, 0,
                                          codec, null);
       info.setFiles(new HashSet<String>(trackingDir.getCreatedFiles()));
@@ -2815,6 +2813,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
             // merge will skip merging it and will then drop
             // it once it's done:
             if (!mergingSegments.contains(info)) {
+              System.out.println("drop all del seg=" + info.name);
               segmentInfos.remove(info);
               readerPool.drop(info);
             }
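
Note: the surrounding logic drops a segment whose documents are all deleted, but only when no in-flight merge involves it; a merging segment is left alone because the merge already sees the deletions and drops the segment itself when it finishes. Reduced to a simplified skeleton (same fields and helper as the hunk above):

    // Simplified skeleton of the drop-fully-deleted check.
    if (numDeletedDocs(info) == info.docCount && !mergingSegments.contains(info)) {
      segmentInfos.remove(info); // drop it now; no merge will ever need it
      readerPool.drop(info);
    }
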
@@ -3311,7 +3310,8 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     final String mergeSegmentName = newSegmentName();
     merge.info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergeSegmentName, 0, -1, mergeSegmentName, false, null, false, 0, codec, details);
-    merge.info.setBufferedDeletesGen(result.gen);
+    // nocommit
+    // merge.info.setBufferedDeletesGen(result.gen);
 
     // Lock order: IW -> BD
     bufferedDeletesStream.prune(segmentInfos);
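
Note: "Lock order: IW -> BD" records a deadlock-avoidance rule: any code path that needs both the IndexWriter monitor and the BufferedDeletesStream monitor must take the IndexWriter lock first. If one thread locked IW then BD while another locked BD then IW, each could end up holding one lock while waiting forever for the other. The rule in miniature (stand-in lock objects, not the actual classes):

    // Always IW before BD; taking them in the opposite order risks deadlock.
    void pruneUnderBothLocks(Object indexWriterLock, Object bufferedDeletesLock) {
      synchronized (indexWriterLock) {        // IW first...
        synchronized (bufferedDeletesLock) {  // ...then BD
          // prune / apply buffered deletes here
        }
      }
    }
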
@@ -3329,16 +3329,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
         merge.estimatedMergeBytes += info.sizeInBytes() * (1.0 - delRatio);
       }
     }
-
-    // TODO: I think this should no longer be needed (we
-    // now build CFS before adding segment to the infos);
-    // however, on removing it, tests fail for some reason!
-
-    // Also enroll the merged segment into mergingSegments;
-    // this prevents it from getting selected for a merge
-    // after our merge is done but while we are building the
-    // CFS:
-    mergingSegments.add(merge.info);
   }
 
   static void setDiagnostics(SegmentInfo info, String source) {
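
Note: the size estimate in the retained context pro-rates each source segment by its live-document ratio. With delRatio the fraction of deleted documents, a segment of sizeInBytes is expected to contribute about sizeInBytes * (1.0 - delRatio) bytes to the merged result, since deleted documents are not copied. A worked example:

    // 100 MB segment with 25% of its documents deleted.
    public class MergeSizeEstimate {
      public static void main(String[] args) {
        long sizeInBytes = 100L * 1024 * 1024;
        double delRatio = 2500 / 10000.0;                         // 2,500 of 10,000 docs deleted
        long estimated = (long) (sizeInBytes * (1.0 - delRatio));
        System.out.println(estimated);                            // 78643200 bytes, about 75 MB
      }
    }
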
@@ -3375,9 +3365,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     for(SegmentInfo info : sourceSegments) {
       mergingSegments.remove(info);
     }
 
-    // TODO: if we remove the add in _mergeInit, we should
-    // also remove this:
-    mergingSegments.remove(merge.info);
     merge.registerDone = false;
   }
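
Note: the two deleted TODOs are a matched pair. The add of merge.info into mergingSegments during merge initialization exists only to shield the about-to-be-built CFS from concurrent merge selection, and this removal in mergeFinish must mirror it exactly; deleting one side without the other would leak an entry and permanently block that segment from future merges. The invariant in miniature (names simplified, not the actual IndexWriter methods):

    // Every add during init must have a matching remove at finish.
    class MergeRegistry {
      private final java.util.Set<Object> mergingSegments = new java.util.HashSet<Object>();

      synchronized void mergeInit(Object mergedSegmentInfo)   { mergingSegments.add(mergedSegmentInfo); }
      synchronized void mergeFinish(Object mergedSegmentInfo) { mergingSegments.remove(mergedSegmentInfo); }
    }
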

org/apache/lucene/index/SegmentCoreReaders.java

@@ -110,7 +110,6 @@ final class SegmentCoreReaders {
 
     // ask codec for its Norms:
     // TODO: since we don't write any norms file if there are no norms,
     // kinda jaky to assume the codec handles the case of no norms file at all gracefully?!
-    // nocommit shouldn't we check si.getHasNorms()/si.getHasDocValues()...?
     norms = codec.normsFormat().docsProducer(segmentReadState);
     perDocProducer = codec.docValuesFormat().docsProducer(segmentReadState);
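
Note: the removed nocommit raises a fair design question: rather than trusting every codec to cope with a missing norms file, the reader could consult the segment's own metadata first. The guarded form the comment gestures at might look like this (getHasNorms/getHasDocValues are the accessors the nocommit names; this variant was not committed):

    // Hypothetical guarded variant suggested by the removed nocommit.
    norms = si.getHasNorms() ? codec.normsFormat().docsProducer(segmentReadState) : null;
    perDocProducer = si.getHasDocValues() ? codec.docValuesFormat().docsProducer(segmentReadState) : null;
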