mirror of https://github.com/apache/lucene.git
LUCENE-5969: clean up unnecessary back compat and add segment header
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5969@1628679 13f79535-47bb-0310-9956-ffa450edef68
commit c6c603ace5
parent f70dba06f6
BlockTermsReader.java

@@ -78,11 +78,6 @@ public class BlockTermsReader extends FieldsProducer {
   // Reads the terms index
   private TermsIndexReaderBase indexReader;

-  // keeps the dirStart offset
-  private long dirOffset;
-
-  private final int version;
-
   // Used as key for the terms cache
   private static class FieldAndTerm extends DoubleBarrelLRUCache.CloneableKey {
     String field;
@@ -127,21 +122,22 @@ public class BlockTermsReader extends FieldsProducer {

     boolean success = false;
     try {
-      version = readHeader(in);
+      CodecUtil.checkSegmentHeader(in, BlockTermsWriter.CODEC_NAME,
+                                       BlockTermsWriter.VERSION_START,
+                                       BlockTermsWriter.VERSION_CURRENT,
+                                       info.getId(), segmentSuffix);

       // Have PostingsReader init itself
       postingsReader.init(in);

-      if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
       // NOTE: data file is too costly to verify checksum against all the bytes on open,
       // but for now we at least verify proper structure of the checksum footer: which looks
       // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
       // such as file truncation.
       CodecUtil.retrieveChecksum(in);
-      }

       // Read per-field details
-      seekDir(in, dirOffset);
+      seekDir(in);

       final int numFields = in.readVInt();
       if (numFields < 0) {
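Note: the constructor now fails fast on files that do not belong to this segment. checkSegmentHeader validates codec name, version range, the 16-byte segment id, and the segment suffix; retrieveChecksum only confirms that the checksum footer is structurally present, without reading the whole file. A minimal sketch of that open-time pattern follows; only the CodecUtil calls mirror the hunk above, the class and codec names (HeaderCheckedReader, MY_TERMS_DICT) are illustrative assumptions.

    import java.io.IOException;

    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.index.SegmentReadState;
    import org.apache.lucene.store.IndexInput;

    final class HeaderCheckedReader {
      static final String CODEC_NAME = "MY_TERMS_DICT"; // hypothetical codec name
      static final int VERSION_START = 0;
      static final int VERSION_CURRENT = 0;

      static IndexInput open(SegmentReadState state, String fileName) throws IOException {
        IndexInput in = state.directory.openInput(fileName, state.context);
        boolean success = false;
        try {
          // Rejects files whose codec name, version range, 16-byte segment id,
          // or segment suffix do not match this segment.
          CodecUtil.checkSegmentHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT,
                                       state.segmentInfo.getId(), state.segmentSuffix);
          // Cheap structural check of the footer (FOOTER_MAGIC + algorithmID);
          // the full checksum is only verified later, in checkIntegrity().
          CodecUtil.retrieveChecksum(in);
          success = true;
          return in;
        } finally {
          if (!success) {
            in.close();
          }
        }
      }
    }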
@@ -156,7 +152,7 @@ public class BlockTermsReader extends FieldsProducer {
         final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS_ONLY ? -1 : in.readVLong();
         final long sumDocFreq = in.readVLong();
         final int docCount = in.readVInt();
-        final int longsSize = version >= BlockTermsWriter.VERSION_META_ARRAY ? in.readVInt() : 0;
+        final int longsSize = in.readVInt();
         if (docCount < 0 || docCount > info.getDocCount()) { // #docs with field must be <= #docs
           throw new CorruptIndexException("invalid docCount: " + docCount + " maxDoc: " + info.getDocCount(), in);
         }
@@ -181,24 +177,9 @@ public class BlockTermsReader extends FieldsProducer {
     this.indexReader = indexReader;
   }

-  private int readHeader(IndexInput input) throws IOException {
-    int version = CodecUtil.checkHeader(input, BlockTermsWriter.CODEC_NAME,
-                          BlockTermsWriter.VERSION_START,
-                          BlockTermsWriter.VERSION_CURRENT);
-    if (version < BlockTermsWriter.VERSION_APPEND_ONLY) {
-      dirOffset = input.readLong();
-    }
-    return version;
-  }
-
-  private void seekDir(IndexInput input, long dirOffset) throws IOException {
-    if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
-      input.seek(input.length() - CodecUtil.footerLength() - 8);
-      dirOffset = input.readLong();
-    } else if (version >= BlockTermsWriter.VERSION_APPEND_ONLY) {
-      input.seek(input.length() - 8);
-      dirOffset = input.readLong();
-    }
+  private void seekDir(IndexInput input) throws IOException {
+    input.seek(input.length() - CodecUtil.footerLength() - 8);
+    long dirOffset = input.readLong();
     input.seek(dirOffset);
   }

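Note: with the back-compat branches gone, seekDir assumes a fixed trailer layout, an 8-byte dirOffset written immediately before the codec footer. A self-contained sketch of that seek pattern in plain java.io follows; DirOffsetDemo is illustrative, and FOOTER_LENGTH stands in for CodecUtil.footerLength() (16 bytes: magic, algorithm id, checksum).

    import java.io.IOException;
    import java.io.RandomAccessFile;

    final class DirOffsetDemo {
      static final int FOOTER_LENGTH = 16; // plays the role of CodecUtil.footerLength()

      static long seekDir(RandomAccessFile file) throws IOException {
        // The directory pointer is always the last 8 bytes before the footer,
        // so no per-version branching is needed.
        file.seek(file.length() - FOOTER_LENGTH - 8);
        long dirOffset = file.readLong();
        file.seek(dirOffset); // position at the start of the per-field directory
        return dirOffset;
      }
    }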
@@ -906,9 +887,8 @@ public class BlockTermsReader extends FieldsProducer {
   @Override
   public void checkIntegrity() throws IOException {
     // verify terms
-    if (version >= BlockTermsWriter.VERSION_CHECKSUM) {
-      CodecUtil.checksumEntireFile(in);
-    }
+    CodecUtil.checksumEntireFile(in);
+
     // verify postings
     postingsReader.checkIntegrity();
   }
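Note: checkIntegrity() now always checksums the entire terms file instead of gating on VERSION_CHECKSUM. The sketch below illustrates the idea behind CodecUtil.checksumEntireFile with java.util.zip.CRC32, assuming a file whose last 8 bytes store the checksum of everything before them; it is a simplified illustration, not Lucene's exact footer handling.

    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.util.zip.CRC32;

    final class ChecksumDemo {
      static void verify(RandomAccessFile file) throws IOException {
        long dataLength = file.length() - 8; // assumed: last 8 bytes hold the stored checksum
        CRC32 crc = new CRC32();
        byte[] buffer = new byte[8192];
        file.seek(0);
        long remaining = dataLength;
        while (remaining > 0) {
          int chunk = (int) Math.min(buffer.length, remaining);
          file.readFully(buffer, 0, chunk);
          crc.update(buffer, 0, chunk);
          remaining -= chunk;
        }
        long stored = file.readLong(); // file is now positioned at the stored checksum
        if (crc.getValue() != stored) {
          throw new IOException("checksum mismatch: stored=" + stored + " actual=" + crc.getValue());
        }
      }
    }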
BlockTermsWriter.java

@@ -62,11 +62,8 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
   final static String CODEC_NAME = "BLOCK_TERMS_DICT";

   // Initial format
-  public static final int VERSION_START = 0;
-  public static final int VERSION_APPEND_ONLY = 1;
-  public static final int VERSION_META_ARRAY = 2;
-  public static final int VERSION_CHECKSUM = 3;
-  public static final int VERSION_CURRENT = VERSION_CHECKSUM;
+  public static final int VERSION_START = 4;
+  public static final int VERSION_CURRENT = VERSION_START;

   /** Extension of terms file */
   static final String TERMS_EXTENSION = "tib";
@@ -113,7 +110,7 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
     boolean success = false;
     try {
       fieldInfos = state.fieldInfos;
-      writeHeader(out);
+      CodecUtil.writeSegmentHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       currentField = null;
       this.postingsWriter = postingsWriter;
       // segment = state.segmentName;
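Note: the private writeHeader helper is replaced by a single CodecUtil.writeSegmentHeader call, which stamps the file with codec name, version, the segment's 16-byte id, and the segment suffix. A minimal sketch of the write-side pattern follows; HeaderStampedWriter, MY_CODEC, and the extension parameter are assumptions, only the CodecUtil and IndexFileNames calls mirror the hunk above.

    import java.io.IOException;

    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.index.IndexFileNames;
    import org.apache.lucene.index.SegmentWriteState;
    import org.apache.lucene.store.IndexOutput;

    final class HeaderStampedWriter {
      static final String MY_CODEC = "MY_TERMS_DICT"; // hypothetical codec name
      static final int VERSION_CURRENT = 0;

      static IndexOutput create(SegmentWriteState state, String extension) throws IOException {
        String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, extension);
        IndexOutput out = state.directory.createOutput(fileName, state.context);
        // The header carries codec name, version, the 16-byte segment id, and the segment
        // suffix, so readers can reject files from a different segment or codec.
        CodecUtil.writeSegmentHeader(out, MY_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
        return out;
      }
    }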
@@ -129,10 +126,6 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
     }
   }

-  private void writeHeader(IndexOutput out) throws IOException {
-    CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
-  }
-
   @Override
   public void write(Fields fields) throws IOException {

@@ -184,10 +177,8 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
         }
         out.writeVLong(field.sumDocFreq);
         out.writeVInt(field.docCount);
-        if (VERSION_CURRENT >= VERSION_META_ARRAY) {
-          out.writeVInt(field.longsSize);
-        }
+        out.writeVInt(field.longsSize);
       }
       writeTrailer(dirStart);
       CodecUtil.writeFooter(out);
     } finally {
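Note: the closing sequence kept by this hunk pairs with the reader's seekDir(): write the per-field directory, then an 8-byte pointer back to its start, then the checksum footer. The helper below is illustrative; TrailerDemo and its directory layout are assumptions, not the actual BlockTermsWriter code.

    import java.io.IOException;

    import org.apache.lucene.codecs.CodecUtil;
    import org.apache.lucene.store.IndexOutput;

    final class TrailerDemo {
      static void finish(IndexOutput out, long[] fieldDirOffsets) throws IOException {
        long dirStart = out.getFilePointer();  // the per-field directory begins here
        out.writeVInt(fieldDirOffsets.length); // number of fields
        for (long offset : fieldDirOffsets) {
          out.writeVLong(offset);              // where each field's metadata lives (assumed layout)
        }
        out.writeLong(dirStart);               // 8-byte trailer that seekDir() reads back
        CodecUtil.writeFooter(out);            // FOOTER_MAGIC + algorithm id + checksum
      }
    }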