LUCENE-4055: remove last redundant SI/FI boolean

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4055@1339895 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-05-17 22:28:12 +00:00
parent 67747b0955
commit 5c816ad8ae
22 changed files with 38 additions and 75 deletions
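In short: SegmentInfo stops carrying a hasProx flag, and code that needs to know whether a segment has positional data now asks FieldInfos instead, which is why Lucene40PostingsReader (and SepPostingsReader) gains a FieldInfos parameter in the hunks below. The sketch that follows is not part of the patch; it is a toy model with hypothetical FieldInfo/FieldInfos classes and a simplified IndexOptions enum, meant only to show why the segment-level boolean is redundant: the same answer can always be derived from the per-field index options.

import java.util.Arrays;
import java.util.List;

// Toy model, NOT Lucene code: why a per-segment hasProx flag is redundant.
public class HasProxDemo {

  enum IndexOptions { DOCS_ONLY, DOCS_AND_FREQS, DOCS_AND_FREQS_AND_POSITIONS }

  static class FieldInfo {
    final String name;
    final IndexOptions indexOptions;
    FieldInfo(String name, IndexOptions indexOptions) {
      this.name = name;
      this.indexOptions = indexOptions;
    }
  }

  static class FieldInfos {
    private final List<FieldInfo> infos;
    FieldInfos(List<FieldInfo> infos) { this.infos = infos; }

    // Counterpart of the FieldInfos.hasProx() call used throughout this patch:
    // true if at least one field indexes positions.
    boolean hasProx() {
      for (FieldInfo fi : infos) {
        if (fi.indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
          return true;
        }
      }
      return false;
    }
  }

  public static void main(String[] args) {
    FieldInfos fieldInfos = new FieldInfos(Arrays.asList(
        new FieldInfo("id", IndexOptions.DOCS_ONLY),
        new FieldInfo("body", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)));

    // A postings reader can decide whether to open the positions (.prx) stream
    // from this alone; no separate flag on SegmentInfo is needed.
    System.out.println("hasProx = " + fieldInfos.hasProx()); // prints: hasProx = true
  }
}

In the patch itself the same idea appears as the new FieldInfos argument on Lucene40PostingsReader and as if (fieldInfos.hasProx()) replacing if (segmentInfo.getHasProx()).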

@@ -60,7 +60,7 @@ class AppendingPostingsFormat extends PostingsFormat {
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
-PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
boolean success = false;
try {

@@ -137,16 +137,14 @@ class Lucene3xFields extends FieldsProducer {
files.add(IndexFileNames.segmentFileName(info.name, "", Lucene3xPostingsFormat.TERMS_EXTENSION));
files.add(IndexFileNames.segmentFileName(info.name, "", Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION));
files.add(IndexFileNames.segmentFileName(info.name, "", Lucene3xPostingsFormat.FREQ_EXTENSION));
-if (info.getHasProx()) {
-// LUCENE-1739: for certain versions of 2.9-dev,
-// hasProx would be incorrectly computed during
-// indexing as true, and then stored into the segments
-// file, when it should have been false. So we do the
-// extra check, here:
-final String prx = IndexFileNames.segmentFileName(info.name, "", Lucene3xPostingsFormat.PROX_EXTENSION);
-if (info.dir.fileExists(prx)) {
-files.add(prx);
-}
+// LUCENE-1739: for certain versions of 2.9-dev,
+// hasProx would be incorrectly computed during
+// indexing as true, and then stored into the segments
+// file, when it should have been false. So we do the
+// extra check, here:
+final String prx = IndexFileNames.segmentFileName(info.name, "", Lucene3xPostingsFormat.PROX_EXTENSION);
+if (info.dir.fileExists(prx)) {
+files.add(prx);
+}
}
}
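An aside on the hunk above: the LUCENE-1739 comment carries the reasoning for the fileExists() check — some 2.9-dev indexes recorded a wrong hasProx value, so the directory, not a stored flag, is what gets trusted. With the flag gone from SegmentInfo, that check now simply runs unconditionally. Below is a minimal standalone sketch of the same pattern, using _0.frq/_0.prx-style names and java.nio instead of the project's Directory API; it is illustrative only, not the patch's code.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

public class ProxFileCheck {

  // Trust the directory, not a stored flag: list the positions file only if it
  // actually exists (the same defensive check the patch keeps, now unconditional).
  static List<String> segmentFiles(Path dir, String segmentName) throws IOException {
    List<String> files = new ArrayList<String>();
    files.add(segmentName + ".frq");
    Path prx = dir.resolve(segmentName + ".prx"); // illustrative file layout
    if (Files.exists(prx)) {
      files.add(prx.getFileName().toString());
    }
    return files;
  }

  public static void main(String[] args) throws IOException {
    Path dir = Files.createTempDirectory("seg");
    Files.createFile(dir.resolve("_0.frq")); // freq file is always written
    System.out.println(segmentFiles(dir, "_0")); // prints: [_0.frq]  (no .prx on disk)
  }
}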

@@ -132,7 +132,10 @@ class Lucene3xSegmentInfosReader extends SegmentInfosReader {
final Codec codec = Codec.forName("Lucene3x");
final Map<String,String> diagnostics = input.readStringStringMap();
// nocommit cleane up
+// nocommit we can use hasProx/hasVectors from the 3.x
+// si... if we can pass this to the other components...?
+// nocommit clean up
final boolean hasVectors;
if (format <= SegmentInfos.FORMAT_HAS_VECTORS) {
hasVectors = input.readByte() == 1;
@@ -153,6 +156,6 @@ class Lucene3xSegmentInfosReader extends SegmentInfosReader {
return new SegmentInfo(dir, version, name, docCount, delGen, docStoreOffset,
docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile,
-delCount, hasProx, codec, diagnostics);
+delCount, codec, diagnostics);
}
}

@@ -42,7 +42,7 @@ public final class Lucene40PostingsBaseFormat extends PostingsBaseFormat {
@Override
public PostingsReaderBase postingsReaderBase(SegmentReadState state) throws IOException {
-return new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+return new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
}
@Override

@@ -307,7 +307,7 @@ public class Lucene40PostingsFormat extends PostingsFormat {
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
-PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
boolean success = false;
try {

@@ -24,8 +24,9 @@ import org.apache.lucene.codecs.BlockTermState;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.TermState;
@@ -57,11 +58,11 @@ public class Lucene40PostingsReader extends PostingsReaderBase {
// private String segment;
-public Lucene40PostingsReader(Directory dir, SegmentInfo segmentInfo, IOContext ioContext, String segmentSuffix) throws IOException {
+public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo, IOContext ioContext, String segmentSuffix) throws IOException {
freqIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION),
ioContext);
// this.segment = segmentInfo.name;
-if (segmentInfo.getHasProx()) {
+if (fieldInfos.hasProx()) {
boolean success = false;
try {
proxIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION),
@@ -79,9 +80,7 @@ public class Lucene40PostingsReader extends PostingsReaderBase {
public static void files(SegmentInfo segmentInfo, String segmentSuffix, Collection<String> files) throws IOException {
files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION));
-if (segmentInfo.getHasProx()) {
-files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION));
-}
+files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION));
}
@Override

@@ -82,12 +82,11 @@ class Lucene40SegmentInfosReader extends SegmentInfosReader {
final int delCount = input.readInt();
assert delCount <= docCount;
-final boolean hasProx = input.readByte() == 1;
final Codec codec = Codec.forName(input.readString());
final Map<String,String> diagnostics = input.readStringStringMap();
return new SegmentInfo(dir, version, name, docCount, delGen, docStoreOffset,
docStoreSegment, docStoreIsCompoundFile, normGen, isCompoundFile,
-delCount, hasProx, codec, diagnostics);
+delCount, codec, diagnostics);
}
}

@@ -95,7 +95,6 @@ public class Lucene40SegmentInfosWriter extends SegmentInfosWriter {
output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
output.writeInt(si.getDelCount());
-output.writeByte((byte) (si.getHasProx() ? 1 : 0));
output.writeString(si.getCodec().getName());
output.writeStringStringMap(si.getDiagnostics());
}

@@ -75,7 +75,7 @@ public class SepPostingsReader extends PostingsReaderBase {
} else {
freqIn = null;
}
-if (segmentInfo.getHasProx()) {
+if (fieldInfos.hasProx()) {
posIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.POS_EXTENSION), context);
payloadIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.PAYLOAD_EXTENSION), context);
} else {
@@ -96,10 +96,8 @@ public class SepPostingsReader extends PostingsReaderBase {
files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.FREQ_EXTENSION));
-if (segmentInfo.getHasProx()) {
-files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.POS_EXTENSION));
-files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.PAYLOAD_EXTENSION));
-}
+files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.POS_EXTENSION));
+files.add(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, SepPostingsWriter.PAYLOAD_EXTENSION));
}
@Override

@@ -102,10 +102,6 @@ public class SimpleTextSegmentInfosReader extends SegmentInfosReader {
assert StringHelper.startsWith(scratch, SI_DELCOUNT);
final int delCount = Integer.parseInt(readString(SI_DELCOUNT.length, scratch));
SimpleTextUtil.readLine(input, scratch);
-assert StringHelper.startsWith(scratch, SI_HASPROX);
-final boolean hasProx = Boolean.parseBoolean(readString(SI_HASPROX.length, scratch));
SimpleTextUtil.readLine(input, scratch);
assert StringHelper.startsWith(scratch, SI_USECOMPOUND);
final boolean isCompoundFile = Boolean.parseBoolean(readString(SI_USECOMPOUND.length, scratch));
@@ -164,7 +160,7 @@ public class SimpleTextSegmentInfosReader extends SegmentInfosReader {
return new SegmentInfo(directory, version, name, docCount, delGen, dsOffset,
dsSegment, dsCompoundFile, normGen, isCompoundFile,
-delCount, hasProx, codec, diagnostics);
+delCount, codec, diagnostics);
}
private String readString(int offset, BytesRef scratch) {

@@ -51,7 +51,6 @@ public class SimpleTextSegmentInfosWriter extends SegmentInfosWriter {
final static BytesRef SI_VERSION = new BytesRef(" version ");
final static BytesRef SI_DOCCOUNT = new BytesRef(" number of documents ");
final static BytesRef SI_DELCOUNT = new BytesRef(" number of deletions ");
-final static BytesRef SI_HASPROX = new BytesRef(" has prox ");
final static BytesRef SI_USECOMPOUND = new BytesRef(" uses compound file ");
final static BytesRef SI_DSOFFSET = new BytesRef(" docstore offset ");
final static BytesRef SI_DSSEGMENT = new BytesRef(" docstore segment ");
@@ -145,10 +144,6 @@ public class SimpleTextSegmentInfosWriter extends SegmentInfosWriter {
SimpleTextUtil.write(output, Integer.toString(si.getDelCount()), scratch);
SimpleTextUtil.writeNewline(output);
-SimpleTextUtil.write(output, SI_HASPROX);
-SimpleTextUtil.write(output, si.getHasProx() ? "true" : "false", scratch);
-SimpleTextUtil.writeNewline(output);
SimpleTextUtil.write(output, SI_USECOMPOUND);
SimpleTextUtil.write(output, Boolean.toString(si.getUseCompoundFile()), scratch);
SimpleTextUtil.writeNewline(output);

@@ -185,11 +185,6 @@ public class CheckIndex {
/** Number of fields in this segment. */
int numFields;
-/** True if at least one of the fields in this segment
-* has position data
-* @see FieldType#setIndexOptions(org.apache.lucene.index.FieldInfo.IndexOptions) */
-public boolean hasProx;
/** Map that includes certain
* debugging details that IndexWriter records into
* each segment it creates */
@@ -505,8 +500,6 @@ public class CheckIndex {
segInfoStat.codec = codec;
msg(" compound=" + info.getUseCompoundFile());
segInfoStat.compound = info.getUseCompoundFile();
msg(" hasProx=" + info.getHasProx());
segInfoStat.hasProx = info.getHasProx();
msg(" numFiles=" + info.files().size());
segInfoStat.numFiles = info.files().size();
segInfoStat.sizeMB = info.sizeInBytes()/(1024.*1024.);

@@ -477,7 +477,7 @@ class DocumentsWriterPerThread {
pendingDeletes.terms.clear();
final SegmentInfo newSegment = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, segment, flushState.numDocs,
SegmentInfo.NO, -1, segment, false, null, false, 0,
-flushState.fieldInfos.hasProx(), flushState.codec,
+flushState.codec,
null);
if (infoStream.isEnabled("DWPT")) {
infoStream.message("DWPT", "new segment has " + (flushState.liveDocs == null ? 0 : (flushState.numDocs - flushState.delCountOnFlush)) + " deleted docs");

@@ -2290,7 +2290,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
int docCount = mergeState.mergedDocCount;
SegmentInfo info = new SegmentInfo(directory, Constants.LUCENE_MAIN_VERSION, mergedName, docCount,
SegmentInfo.NO, -1, mergedName, false, null, false, 0,
-mergeState.fieldInfos.hasProx(), codec, null);
+codec, null);
setDiagnostics(info, "addIndexes(IndexReader...)");
@@ -3434,9 +3434,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
MergeState mergeState = merger.merge();
mergedDocCount = merge.info.docCount = mergeState.mergedDocCount;
-// LUCENE-3403: set hasVectors after merge(), so that it is properly set.
-merge.info.setHasProx(mergeState.fieldInfos.hasProx());
// Record which codec was used to write the segment
// nocommit stop doing this once we call non-wimpy

@@ -42,7 +42,6 @@ import org.apache.lucene.util.Constants;
* @lucene.experimental
*/
public final class SegmentInfo implements Cloneable {
-// TODO: remove with hasVector and hasProx
public static final int CHECK_FIELDINFO = -2;
// TODO: remove these from this class, for now this is the representation
@@ -87,8 +86,6 @@ public final class SegmentInfo implements Cloneable {
private int delCount; // How many deleted docs in this segment
-private boolean hasProx; // True if this segment has any fields with positional information
private Codec codec;
private Map<String,String> diagnostics;
@@ -134,7 +131,7 @@ public final class SegmentInfo implements Cloneable {
*/
public SegmentInfo(Directory dir, String version, String name, int docCount, long delGen, int docStoreOffset,
String docStoreSegment, boolean docStoreIsCompoundFile, Map<Integer,Long> normGen, boolean isCompoundFile,
-int delCount, boolean hasProx, Codec codec, Map<String,String> diagnostics) {
+int delCount, Codec codec, Map<String,String> diagnostics) {
this.dir = dir;
this.version = version;
this.name = name;
@@ -146,8 +143,6 @@ public final class SegmentInfo implements Cloneable {
this.normGen = normGen;
this.isCompoundFile = isCompoundFile;
this.delCount = delCount;
-// nocommit remove these now that we can do regexp instead!
-this.hasProx = hasProx;
this.codec = codec;
this.diagnostics = diagnostics;
}
@@ -166,16 +161,6 @@ public final class SegmentInfo implements Cloneable {
return sizeInBytes;
}
-// nocommit: ideally codec stores this info privately:
-public boolean getHasProx() throws IOException {
-return hasProx;
-}
-public void setHasProx(boolean hasProx) {
-this.hasProx = hasProx;
-clearFilesCache();
-}
public boolean hasDeletions() {
// Cases:
//
@@ -221,7 +206,7 @@ public final class SegmentInfo implements Cloneable {
return new SegmentInfo(dir, version, name, docCount, delGen, docStoreOffset,
docStoreSegment, docStoreIsCompoundFile, clonedNormGen, isCompoundFile,
-delCount, hasProx, codec, new HashMap<String,String>(diagnostics));
+delCount, codec, new HashMap<String,String>(diagnostics));
}
/**

@@ -313,7 +313,6 @@ public class TestCodecs extends LuceneTestCase {
Codec codec = Codec.getDefault();
final SegmentInfo si = new SegmentInfo(dir, Constants.LUCENE_MAIN_VERSION, SEGMENT, 10000, -1, -1,
SEGMENT, false, null, false, 0,
-fieldInfos.hasProx(),
codec, null);
if (VERBOSE) {

@@ -202,7 +202,7 @@ public class TestDoc extends LuceneTestCase {
r2.close();
final SegmentInfo info = new SegmentInfo(si1.dir, Constants.LUCENE_MAIN_VERSION, merged,
si1.docCount + si2.docCount, -1, -1, merged,
-false, null, false, 0, mergeState.fieldInfos.hasProx(), codec, null);
+false, null, false, 0, codec, null);
if (useCompoundFile) {
Collection<String> filesToDelete = IndexWriter.createCompoundFile(dir, merged + ".cfs", MergeState.CheckAbort.NONE, info, newIOContext(random()));

@@ -85,7 +85,7 @@ public class TestSegmentMerger extends LuceneTestCase {
assertTrue(docsMerged == 2);
//Should be able to open a new SegmentReader against the new directory
SegmentReader mergedReader = new SegmentReader(new SegmentInfo(mergedDir, Constants.LUCENE_MAIN_VERSION, mergedSegment, docsMerged, -1, -1, mergedSegment,
-false, null, false, 0, mergeState.fieldInfos.hasProx(), codec, null),
+false, null, false, 0, codec, null),
DirectoryReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random()));
assertTrue(mergedReader != null);
assertTrue(mergedReader.numDocs() == 2);

@@ -93,8 +93,10 @@ class PreFlexRWSegmentInfosWriter extends SegmentInfosWriter {
output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
output.writeInt(si.getDelCount());
-output.writeByte((byte) (si.getHasProx() ? 1 : 0));
+// hasProx:
+output.writeByte((byte) 1);
output.writeStringStringMap(si.getDiagnostics());
// hasVectors:
output.writeByte((byte) 1);
}

@@ -88,7 +88,7 @@ public class Lucene40WithOrds extends PostingsFormat {
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
-PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+PostingsReaderBase postings = new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
TermsIndexReaderBase indexReader;
boolean success = false;

@@ -299,7 +299,7 @@ public class MockRandomPostingsFormat extends PostingsFormat {
if (LuceneTestCase.VERBOSE) {
System.out.println("MockRandomCodec: reading Standard postings");
}
-postingsReader = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+postingsReader = new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
}
if (random.nextBoolean()) {

@@ -70,7 +70,7 @@ public class NestedPulsingPostingsFormat extends PostingsFormat {
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
-PostingsReaderBase docsReader = new Lucene40PostingsReader(state.dir, state.segmentInfo, state.context, state.segmentSuffix);
+PostingsReaderBase docsReader = new Lucene40PostingsReader(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
PostingsReaderBase pulsingReaderInner = new PulsingPostingsReader(docsReader);
PostingsReaderBase pulsingReader = new PulsingPostingsReader(pulsingReaderInner);
boolean success = false;