LUCENE-5969: segmentHeader -> indexHeader, cut over segments_N, detect mismatched .si, consistency of encoding elsewhere
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5969@1633514 13f79535-47bb-0310-9956-ffa450edef68
parent 20e798c5eb
commit c4bf601b68
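
The core of this change is a rename in CodecUtil: the per-segment header helpers (writeSegmentHeader/checkSegmentHeader) become generic index-file header helpers (writeIndexHeader/checkIndexHeader), so that non-segment files such as segments_N and generation-stamped .liv files can reuse the same ID-and-suffix validation. A minimal sketch of the new pair, assuming only the signatures visible in this diff plus Lucene's Directory/IOContext API and given some Directory dir; the file name, codec name, and suffix are hypothetical:

    // Write a header carrying a 16-byte file-instance ID plus an auxiliary suffix.
    byte[] id = new byte[StringHelper.ID_LENGTH];   // 16 bytes, filled with a random ID here
    new java.util.Random().nextBytes(id);
    try (IndexOutput out = dir.createOutput("_0_demo.dat", IOContext.DEFAULT)) {
      CodecUtil.writeIndexHeader(out, "DemoCodec", 0, id, "sfx");
      // ... file contents ...
      CodecUtil.writeFooter(out);
    }
    // Reading back with a wrong codec name, version range, ID, or suffix throws
    // CorruptIndexException / IndexFormatTooOldException / IndexFormatTooNewException:
    try (IndexInput in = dir.openInput("_0_demo.dat", IOContext.DEFAULT)) {
      int version = CodecUtil.checkIndexHeader(in, "DemoCodec", 0, 0, id, "sfx");
    }
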
@@ -45,7 +45,7 @@ public class Lucene40SegmentInfoFormat extends SegmentInfoFormat {
   }
 
   @Override
-  public final SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
+  public final SegmentInfo read(Directory dir, String segment, byte segmentID[], IOContext context) throws IOException {
     final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
     final IndexInput input = dir.openInput(fileName, context);
     boolean success = false;
@@ -44,7 +44,7 @@ public class Lucene46SegmentInfoFormat extends SegmentInfoFormat {
   }
 
   @Override
-  public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
+  public SegmentInfo read(Directory dir, String segment, byte segmentID[], IOContext context) throws IOException {
     final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene46SegmentInfoFormat.SI_EXTENSION);
     try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
       int codecVersion = CodecUtil.checkHeader(input, Lucene46SegmentInfoFormat.CODEC_NAME,
@@ -1075,8 +1075,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
   }
 
   private int checkAllSegmentsUpgraded(Directory dir) throws IOException {
-    final SegmentInfos infos = new SegmentInfos();
-    infos.read(dir);
+    final SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
     if (VERBOSE) {
       System.out.println("checkAllSegmentsUpgraded: " + infos);
     }
@@ -1087,8 +1086,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
   }
 
   private int getNumberOfSegments(Directory dir) throws IOException {
-    final SegmentInfos infos = new SegmentInfos();
-    infos.read(dir);
+    final SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
     return infos.size();
   }
 
@@ -1306,7 +1304,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     writer.forceMerge(1);
     writer.commit();
     writer.rollback();
-    new SegmentInfos().read(dir);
+    SegmentInfos.readLatestCommit(dir);
     dir.close();
   }
 }
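
For context, the segments_N cutover removes the mutate-in-place read: callers no longer allocate a SegmentInfos and fill it, they go through the new static helper, as the hunks above show. A hedged before/after sketch:

    // Before: allocate, then read the latest commit into the instance.
    SegmentInfos infos = new SegmentInfos();
    infos.read(dir);

    // After: one static call that locates and loads the latest commit point.
    SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
    int segmentCount = infos.size();
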
@@ -833,8 +833,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     ir.close();
 
     // Make sure we have 3 segments:
-    SegmentInfos infos = new SegmentInfos();
-    infos.read(benchmark.getRunData().getDirectory());
+    SegmentInfos infos = SegmentInfos.readLatestCommit(benchmark.getRunData().getDirectory());
     assertEquals(3, infos.size());
   }
 
@@ -50,8 +50,7 @@ public class CommitIndexTaskTest extends BenchmarkTestCase {
     CommitIndexTask task = new CommitIndexTask(runData);
     task.setParams("params");
     task.doLogic();
-    SegmentInfos infos = new SegmentInfos();
-    infos.read(runData.getDirectory());
+    SegmentInfos infos = SegmentInfos.readLatestCommit(runData.getDirectory());
     assertEquals("params", infos.getUserData().get(OpenReaderTask.USER_DATA));
     new CloseIndexTask(runData).doLogic();
   }
@@ -114,7 +114,7 @@ public class BlockTermsReader extends FieldsProducer {
 
     boolean success = false;
     try {
-      CodecUtil.checkSegmentHeader(in, BlockTermsWriter.CODEC_NAME,
+      CodecUtil.checkIndexHeader(in, BlockTermsWriter.CODEC_NAME,
                                    BlockTermsWriter.VERSION_START,
                                    BlockTermsWriter.VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
@@ -110,7 +110,7 @@ public class BlockTermsWriter extends FieldsConsumer implements Closeable {
     boolean success = false;
     try {
       fieldInfos = state.fieldInfos;
-      CodecUtil.writeSegmentHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       currentField = null;
       this.postingsWriter = postingsWriter;
       // segment = state.segmentName;
@@ -74,7 +74,7 @@ public class FixedGapTermsIndexReader extends TermsIndexReaderBase {
 
     try {
 
-      CodecUtil.checkSegmentHeader(in, FixedGapTermsIndexWriter.CODEC_NAME,
+      CodecUtil.checkIndexHeader(in, FixedGapTermsIndexWriter.CODEC_NAME,
                                    FixedGapTermsIndexWriter.VERSION_CURRENT,
                                    FixedGapTermsIndexWriter.VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
@@ -72,7 +72,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
     out = state.directory.createOutput(indexFileName, state.context);
     boolean success = false;
     try {
-      CodecUtil.writeSegmentHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       out.writeVInt(termIndexInterval);
       out.writeVInt(PackedInts.VERSION_CURRENT);
       out.writeVInt(BLOCKSIZE);
@@ -54,7 +54,7 @@ public class VariableGapTermsIndexReader extends TermsIndexReaderBase {
 
     try {
 
-      CodecUtil.checkSegmentHeader(in, VariableGapTermsIndexWriter.CODEC_NAME,
+      CodecUtil.checkIndexHeader(in, VariableGapTermsIndexWriter.CODEC_NAME,
                                    VariableGapTermsIndexWriter.VERSION_START,
                                    VariableGapTermsIndexWriter.VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
@@ -182,7 +182,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
     try {
       fieldInfos = state.fieldInfos;
       this.policy = policy;
-      CodecUtil.writeSegmentHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       success = true;
     } finally {
       if (!success) {
@@ -73,7 +73,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
     IndexInput indexIn = null;
 
     try {
-      int version = CodecUtil.checkSegmentHeader(in, OrdsBlockTreeTermsWriter.TERMS_CODEC_NAME,
+      int version = CodecUtil.checkIndexHeader(in, OrdsBlockTreeTermsWriter.TERMS_CODEC_NAME,
                                                  OrdsBlockTreeTermsWriter.VERSION_START,
                                                  OrdsBlockTreeTermsWriter.VERSION_CURRENT,
                                                  state.segmentInfo.getId(), state.segmentSuffix);
@@ -82,7 +82,7 @@ public final class OrdsBlockTreeTermsReader extends FieldsProducer {
                                                          state.segmentSuffix,
                                                          OrdsBlockTreeTermsWriter.TERMS_INDEX_EXTENSION);
       indexIn = state.directory.openInput(indexFile, state.context);
-      int indexVersion = CodecUtil.checkSegmentHeader(indexIn, OrdsBlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
+      int indexVersion = CodecUtil.checkIndexHeader(indexIn, OrdsBlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
                                                       OrdsBlockTreeTermsWriter.VERSION_START,
                                                       OrdsBlockTreeTermsWriter.VERSION_CURRENT,
                                                       state.segmentInfo.getId(), state.segmentSuffix);
@@ -203,11 +203,11 @@ public final class OrdsBlockTreeTermsWriter extends FieldsConsumer {
       fieldInfos = state.fieldInfos;
       this.minItemsInBlock = minItemsInBlock;
       this.maxItemsInBlock = maxItemsInBlock;
-      CodecUtil.writeSegmentHeader(out, TERMS_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(out, TERMS_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
 
       final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
       indexOut = state.directory.createOutput(termsIndexFileName, state.context);
-      CodecUtil.writeSegmentHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
 
       this.postingsWriter = postingsWriter;
       // segment = state.segmentInfo.name;
@@ -72,7 +72,7 @@ import org.apache.lucene.util.automaton.CompiledAutomaton;
  * NumFilteredFields, Filter<sup>NumFilteredFields</sup>, Footer</li>
  * <li>Filter --> FieldNumber, FuzzySet</li>
  * <li>FuzzySet -->See {@link FuzzySet#serialize(DataOutput)}</li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>DelegatePostingsFormatName --> {@link DataOutput#writeString(String)
 * String} The name of a ServiceProvider registered {@link PostingsFormat}</li>
  * <li>NumFilteredFields --> {@link DataOutput#writeInt Uint32}</li>
@@ -166,7 +166,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
       boolean success = false;
       try {
         bloomIn = state.directory.openChecksumInput(bloomFileName, state.context);
-        CodecUtil.checkSegmentHeader(bloomIn, BLOOM_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+        CodecUtil.checkIndexHeader(bloomIn, BLOOM_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
         // // Load the hash function used in the BloomFilter
         // hashFunction = HashFunction.forName(bloomIn.readString());
         // Load the delegate postings format
@@ -502,7 +502,7 @@ public final class BloomFilteringPostingsFormat extends PostingsFormat {
       IndexOutput bloomOutput = null;
       try {
         bloomOutput = state.directory.createOutput(bloomFileName, state.context);
-        CodecUtil.writeSegmentHeader(bloomOutput, BLOOM_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+        CodecUtil.writeIndexHeader(bloomOutput, BLOOM_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
         // remember the name of the postings format we will delegate to
         bloomOutput.writeString(delegatePostingsFormat.getName());
 
@@ -52,10 +52,10 @@ class DirectDocValuesConsumer extends DocValuesConsumer {
     try {
       String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
       data = state.directory.createOutput(dataName, state.context);
-      CodecUtil.writeSegmentHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
       meta = state.directory.createOutput(metaName, state.context);
-      CodecUtil.writeSegmentHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       success = true;
     } finally {
       if (!success) {
@@ -122,7 +122,7 @@ class DirectDocValuesProducer extends DocValuesProducer {
     ramBytesUsed = new AtomicLong(RamUsageEstimator.shallowSizeOfInstance(getClass()));
     boolean success = false;
     try {
-      version = CodecUtil.checkSegmentHeader(in, metaCodec, VERSION_START, VERSION_CURRENT,
+      version = CodecUtil.checkIndexHeader(in, metaCodec, VERSION_START, VERSION_CURRENT,
                                              state.segmentInfo.getId(), state.segmentSuffix);
       numEntries = readFields(in, state.fieldInfos);
 
@@ -140,7 +140,7 @@ class DirectDocValuesProducer extends DocValuesProducer {
     this.data = state.directory.openInput(dataName, state.context);
     success = false;
     try {
-      final int version2 = CodecUtil.checkSegmentHeader(data, dataCodec, VERSION_START, VERSION_CURRENT,
+      final int version2 = CodecUtil.checkIndexHeader(data, dataCodec, VERSION_START, VERSION_CURRENT,
                                                         state.segmentInfo.getId(), state.segmentSuffix);
       if (version != version2) {
         throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data);
@@ -88,11 +88,11 @@ public class FSTOrdTermsReader extends FieldsProducer {
     try {
       indexIn = state.directory.openChecksumInput(termsIndexFileName, state.context);
       blockIn = state.directory.openInput(termsBlockFileName, state.context);
-      int version = CodecUtil.checkSegmentHeader(indexIn, FSTOrdTermsWriter.TERMS_INDEX_CODEC_NAME,
+      int version = CodecUtil.checkIndexHeader(indexIn, FSTOrdTermsWriter.TERMS_INDEX_CODEC_NAME,
                                                  FSTOrdTermsWriter.VERSION_START,
                                                  FSTOrdTermsWriter.VERSION_CURRENT,
                                                  state.segmentInfo.getId(), state.segmentSuffix);
-      int version2 = CodecUtil.checkSegmentHeader(blockIn, FSTOrdTermsWriter.TERMS_CODEC_NAME,
+      int version2 = CodecUtil.checkIndexHeader(blockIn, FSTOrdTermsWriter.TERMS_CODEC_NAME,
                                                   FSTOrdTermsWriter.VERSION_START,
                                                   FSTOrdTermsWriter.VERSION_CURRENT,
                                                   state.segmentInfo.getId(), state.segmentSuffix);
@@ -75,7 +75,7 @@ import org.apache.lucene.util.fst.Util;
  * <ul>
  * <li>TermIndex(.tix) --> Header, TermFST<sup>NumFields</sup>, Footer</li>
  * <li>TermFST --> {@link FST FST<long>}</li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>Footer --> {@link CodecUtil#writeFooter CodecFooter}</li>
  * </ul>
  *
@@ -113,7 +113,7 @@ import org.apache.lucene.util.fst.Util;
  * <li>StatsBlock --> < DocFreq[Same?], (TotalTermFreq-DocFreq) ? > <sup>NumTerms</sup>
  * <li>MetaLongsBlock --> < LongDelta<sup>LongsSize</sup>, BytesSize > <sup>NumTerms</sup>
  * <li>MetaBytesBlock --> Byte <sup>MetaBytesBlockLength</sup>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader CodecHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>DirOffset --> {@link DataOutput#writeLong Uint64}</li>
  * <li>NumFields, FieldNumber, DocCount, DocFreq, LongsSize,
 *     FieldNumber, DocCount --> {@link DataOutput#writeVInt VInt}</li>
@@ -174,9 +174,9 @@ public class FSTOrdTermsWriter extends FieldsConsumer {
     try {
       this.indexOut = state.directory.createOutput(termsIndexFileName, state.context);
       this.blockOut = state.directory.createOutput(termsBlockFileName, state.context);
-      CodecUtil.writeSegmentHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT,
+      CodecUtil.writeIndexHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
-      CodecUtil.writeSegmentHeader(blockOut, TERMS_CODEC_NAME, VERSION_CURRENT,
+      CodecUtil.writeIndexHeader(blockOut, TERMS_CODEC_NAME, VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
       this.postingsWriter.init(blockOut, state);
       success = true;
@@ -81,7 +81,7 @@ public class FSTTermsReader extends FieldsProducer {
 
     boolean success = false;
     try {
-      CodecUtil.checkSegmentHeader(in, FSTTermsWriter.TERMS_CODEC_NAME,
+      CodecUtil.checkIndexHeader(in, FSTTermsWriter.TERMS_CODEC_NAME,
                                    FSTTermsWriter.TERMS_VERSION_START,
                                    FSTTermsWriter.TERMS_VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
@@ -90,7 +90,7 @@ import org.apache.lucene.util.fst.Util;
  * <li>TermFST --> {@link FST FST<TermData>}</li>
  * <li>TermData --> Flag, BytesSize?, LongDelta<sup>LongsSize</sup>?, Byte<sup>BytesSize</sup>?,
 *     < DocFreq[Same?], (TotalTermFreq-DocFreq) > ? </li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>DirOffset --> {@link DataOutput#writeLong Uint64}</li>
  * <li>DocFreq, LongsSize, BytesSize, NumFields,
 *     FieldNumber, DocCount --> {@link DataOutput#writeVInt VInt}</li>
@@ -142,7 +142,7 @@ public class FSTTermsWriter extends FieldsConsumer {
 
     boolean success = false;
     try {
-      CodecUtil.writeSegmentHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT,
+      CodecUtil.writeIndexHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT,
                                    state.segmentInfo.getId(), state.segmentSuffix);
 
       this.postingsWriter.init(out, state);
@@ -74,10 +74,10 @@ class MemoryDocValuesConsumer extends DocValuesConsumer {
     try {
       String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
       data = state.directory.createOutput(dataName, state.context);
-      CodecUtil.writeSegmentHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
       meta = state.directory.createOutput(metaName, state.context);
-      CodecUtil.writeSegmentHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       success = true;
     } finally {
       if (!success) {
@@ -146,7 +146,7 @@ class MemoryDocValuesProducer extends DocValuesProducer {
     ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context);
     boolean success = false;
     try {
-      version = CodecUtil.checkSegmentHeader(in, metaCodec, VERSION_START, VERSION_CURRENT,
+      version = CodecUtil.checkIndexHeader(in, metaCodec, VERSION_START, VERSION_CURRENT,
                                              state.segmentInfo.getId(), state.segmentSuffix);
       numEntries = readFields(in, state.fieldInfos);
       CodecUtil.checkFooter(in);
@@ -164,7 +164,7 @@ class MemoryDocValuesProducer extends DocValuesProducer {
     this.data = state.directory.openInput(dataName, state.context);
     success = false;
     try {
-      final int version2 = CodecUtil.checkSegmentHeader(data, dataCodec, VERSION_START, VERSION_CURRENT,
+      final int version2 = CodecUtil.checkIndexHeader(data, dataCodec, VERSION_START, VERSION_CURRENT,
                                                         state.segmentInfo.getId(), state.segmentSuffix);
       if (version != version2) {
         throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data);
@@ -288,7 +288,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
       out = state.directory.createOutput(fileName, state.context);
       boolean success = false;
       try {
-        CodecUtil.writeSegmentHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+        CodecUtil.writeIndexHeader(out, CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
         success = true;
       } finally {
         if (!success) {
@@ -981,7 +981,7 @@ public final class MemoryPostingsFormat extends PostingsFormat {
     try (ChecksumIndexInput in = state.directory.openChecksumInput(fileName, IOContext.READONCE)) {
       Throwable priorE = null;
       try {
-        CodecUtil.checkSegmentHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+        CodecUtil.checkIndexHeader(in, CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
         while(true) {
           final int termCount = in.readVInt();
           if (termCount == 0) {
@@ -60,7 +60,7 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
   public static final String SI_EXTENSION = "si";
 
   @Override
-  public SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException {
+  public SegmentInfo read(Directory directory, String segmentName, byte[] segmentID, IOContext context) throws IOException {
     BytesRefBuilder scratch = new BytesRefBuilder();
     String segFileName = IndexFileNames.segmentFileName(segmentName, "", SimpleTextSegmentInfoFormat.SI_EXTENSION);
     ChecksumIndexInput input = directory.openChecksumInput(segFileName, context);
@@ -114,6 +114,11 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
       SimpleTextUtil.readLine(input, scratch);
       assert StringHelper.startsWith(scratch.get(), SI_ID);
       final byte[] id = Arrays.copyOfRange(scratch.bytes(), SI_ID.length, scratch.length());
+
+      if (!Arrays.equals(segmentID, id)) {
+        throw new CorruptIndexException("file mismatch, expected: " + StringHelper.idToString(segmentID)
+                                        + ", got: " + StringHelper.idToString(id), input);
+      }
 
       SimpleTextUtil.checkFooter(input);
 
@@ -94,46 +94,48 @@ public final class CodecUtil {
   }
 
   /**
-   * Writes a codec header for a per-segment, which records both a string to
-   * identify the file, a version number, and the unique ID of the segment.
-   * This header can be parsed and validated with
-   * {@link #checkSegmentHeader(DataInput, String, int, int, byte[], String) checkSegmentHeader()}.
+   * Writes a codec header for an index file, which records both a string to
+   * identify the format of the file, a version number, and data to identify
+   * the file instance (ID and auxiliary suffix such as generation).
    * <p>
-   * CodecSegmentHeader --> CodecHeader,SegmentID,SegmentSuffix
+   * This header can be parsed and validated with
+   * {@link #checkIndexHeader(DataInput, String, int, int, byte[], String) checkIndexHeader()}.
+   * <p>
+   * IndexHeader --> CodecHeader,ObjectID,ObjectSuffix
    * <ul>
    *    <li>CodecHeader --> {@link #writeHeader}
-   *    <li>SegmentID --> {@link DataOutput#writeByte byte}<sup>16</sup>
-   *    <li>SegmentSuffix --> SuffixLength,SuffixBytes
+   *    <li>ObjectID --> {@link DataOutput#writeByte byte}<sup>16</sup>
+   *    <li>ObjectSuffix --> SuffixLength,SuffixBytes
    *    <li>SuffixLength --> {@link DataOutput#writeByte byte}
    *    <li>SuffixBytes --> {@link DataOutput#writeByte byte}<sup>SuffixLength</sup>
    * </ul>
    * <p>
-   * Note that the length of a segment header depends only upon the
+   * Note that the length of an index header depends only upon the
    * name of the codec and suffix, so this length can be computed at any time
-   * with {@link #segmentHeaderLength(String,String)}.
+   * with {@link #indexHeaderLength(String,String)}.
    *
    * @param out Output stream
-   * @param codec String to identify this file. It should be simple ASCII,
+   * @param codec String to identify the format of this file. It should be simple ASCII,
    *              less than 128 characters in length.
-   * @param segmentID Unique identifier for the segment
-   * @param segmentSuffix auxiliary suffix for the file. It should be simple ASCII,
+   * @param id Unique identifier for this particular file instance.
+   * @param suffix auxiliary suffix information for the file. It should be simple ASCII,
    *              less than 256 characters in length.
    * @param version Version number
    * @throws IOException If there is an I/O error writing to the underlying medium.
    * @throws IllegalArgumentException If the codec name is not simple ASCII, or
-   *         is more than 127 characters in length, or if segmentID is invalid,
-   *         or if the segmentSuffix is not simple ASCII, or more than 255 characters
+   *         is more than 127 characters in length, or if id is invalid,
+   *         or if the suffix is not simple ASCII, or more than 255 characters
    *         in length.
    */
-  public static void writeSegmentHeader(DataOutput out, String codec, int version, byte[] segmentID, String segmentSuffix) throws IOException {
-    if (segmentID.length != StringHelper.ID_LENGTH) {
-      throw new IllegalArgumentException("Invalid id: " + StringHelper.idToString(segmentID));
+  public static void writeIndexHeader(DataOutput out, String codec, int version, byte[] id, String suffix) throws IOException {
+    if (id.length != StringHelper.ID_LENGTH) {
+      throw new IllegalArgumentException("Invalid id: " + StringHelper.idToString(id));
     }
     writeHeader(out, codec, version);
-    out.writeBytes(segmentID, 0, segmentID.length);
-    BytesRef suffixBytes = new BytesRef(segmentSuffix);
-    if (suffixBytes.length != segmentSuffix.length() || suffixBytes.length >= 256) {
-      throw new IllegalArgumentException("codec must be simple ASCII, less than 256 characters in length [got " + segmentSuffix + "]");
+    out.writeBytes(id, 0, id.length);
+    BytesRef suffixBytes = new BytesRef(suffix);
+    if (suffixBytes.length != suffix.length() || suffixBytes.length >= 256) {
+      throw new IllegalArgumentException("codec must be simple ASCII, less than 256 characters in length [got " + suffix + "]");
     }
     out.writeByte((byte)suffixBytes.length);
    out.writeBytes(suffixBytes.bytes, suffixBytes.offset, suffixBytes.length);
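
Because the layout above is fixed, the header length is computable before reading anything; the assert statements in the Compressing* readers and writers further down depend on exactly this. A small sketch of the arithmetic, assuming headerLength(codec) is the existing 9 + codec.length() (4 magic bytes, a 1-byte codec-name length plus the name, and a 4-byte version):

    // indexHeaderLength(codec, suffix) per this diff:
    //   headerLength(codec) + StringHelper.ID_LENGTH + 1 + suffix.length()
    int codecLen = "DemoCodecIdx".length();      // hypothetical codec name
    int expected = (9 + codecLen) + 16 + 1 + 0;  // ID_LENGTH == 16, empty suffix
    // A reader can then assert expected == in.getFilePointer() right after
    // checkIndexHeader, as the Compressing* classes do.
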
@@ -151,14 +153,14 @@ public final class CodecUtil {
   }
 
   /**
-   * Computes the length of a segment header.
+   * Computes the length of an index header.
    *
    * @param codec Codec name.
-   * @return length of the entire segment header.
-   * @see #writeSegmentHeader(DataOutput, String, int, byte[], String)
+   * @return length of the entire index header.
+   * @see #writeIndexHeader(DataOutput, String, int, byte[], String)
    */
-  public static int segmentHeaderLength(String codec, String segmentSuffix) {
-    return headerLength(codec) + StringHelper.ID_LENGTH + 1 + segmentSuffix.length();
+  public static int indexHeaderLength(String codec, String suffix) {
+    return headerLength(codec) + StringHelper.ID_LENGTH + 1 + suffix.length();
   }
 
   /**
@@ -220,11 +222,11 @@ public final class CodecUtil {
 
   /**
    * Reads and validates a header previously written with
-   * {@link #writeSegmentHeader(DataOutput, String, int, byte[], String)}.
+   * {@link #writeIndexHeader(DataOutput, String, int, byte[], String)}.
    * <p>
    * When reading a file, supply the expected <code>codec</code>,
    * expected version range (<code>minVersion to maxVersion</code>),
-   * and segment ID.
+   * and object ID and suffix.
    *
    * @param in Input stream, positioned at the point where the
    *        header was previously written. Typically this is located
@@ -232,41 +234,53 @@ public final class CodecUtil {
    * @param codec The expected codec name.
    * @param minVersion The minimum supported expected version number.
    * @param maxVersion The maximum supported expected version number.
-   * @param segmentID The expected segment this file belongs to.
-   * @param segmentSuffix The expected auxiliary segment suffix for this file.
+   * @param expectedID The expected object identifier for this file.
+   * @param expectedSuffix The expected auxiliary suffix for this file.
    * @return The actual version found, when a valid header is found
    *         that matches <code>codec</code>, with an actual version
    *         where <code>minVersion <= actual <= maxVersion</code>,
-   *         and matching <code>segmentID</code>
+   *         and matching <code>expectedID</code> and <code>expectedSuffix</code>
    *         Otherwise an exception is thrown.
    * @throws CorruptIndexException If the first four bytes are not
    *         {@link #CODEC_MAGIC}, or if the actual codec found is
-   *         not <code>codec</code>, or if the <code>segmentID</code>
-   *         or <code>segmentSuffix</code> do not match.
+   *         not <code>codec</code>, or if the <code>expectedID</code>
+   *         or <code>expectedSuffix</code> do not match.
    * @throws IndexFormatTooOldException If the actual version is less
    *         than <code>minVersion</code>.
    * @throws IndexFormatTooNewException If the actual version is greater
    *         than <code>maxVersion</code>.
    * @throws IOException If there is an I/O error reading from the underlying medium.
-   * @see #writeSegmentHeader(DataOutput, String, int, byte[],String)
+   * @see #writeIndexHeader(DataOutput, String, int, byte[],String)
    */
-  public static int checkSegmentHeader(DataInput in, String codec, int minVersion, int maxVersion, byte[] segmentID, String segmentSuffix) throws IOException {
+  public static int checkIndexHeader(DataInput in, String codec, int minVersion, int maxVersion, byte[] expectedID, String expectedSuffix) throws IOException {
     int version = checkHeader(in, codec, minVersion, maxVersion);
+    checkIndexHeaderID(in, expectedID);
+    checkIndexHeaderSuffix(in, expectedSuffix);
+    return version;
+  }
+
+  /** Expert: just reads and verifies the object ID of an index header */
+  public static byte[] checkIndexHeaderID(DataInput in, byte[] expectedID) throws IOException {
     byte id[] = new byte[StringHelper.ID_LENGTH];
     in.readBytes(id, 0, id.length);
-    if (!Arrays.equals(id, segmentID)) {
-      throw new CorruptIndexException("file mismatch, expected segment id=" + StringHelper.idToString(segmentID)
-                                      + ", got=" + StringHelper.idToString(id), in);
+    if (!Arrays.equals(id, expectedID)) {
+      throw new CorruptIndexException("file mismatch, expected id=" + StringHelper.idToString(expectedID)
+                                      + ", got=" + StringHelper.idToString(id), in);
     }
+    return id;
+  }
+
+  /** Expert: just reads and verifies the suffix of an index header */
+  public static String checkIndexHeaderSuffix(DataInput in, String expectedSuffix) throws IOException {
     int suffixLength = in.readByte() & 0xFF;
     byte suffixBytes[] = new byte[suffixLength];
     in.readBytes(suffixBytes, 0, suffixBytes.length);
     String suffix = new String(suffixBytes, 0, suffixBytes.length, StandardCharsets.UTF_8);
-    if (!suffix.equals(segmentSuffix)) {
-      throw new CorruptIndexException("file mismatch, expected segment suffix=" + segmentSuffix
-                                      + ", got=" + suffix, in);
+    if (!suffix.equals(expectedSuffix)) {
+      throw new CorruptIndexException("file mismatch, expected suffix=" + expectedSuffix
+                                      + ", got=" + suffix, in);
     }
-    return version;
+    return suffix;
   }
 
   /**
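
Splitting the check into checkIndexHeaderID and checkIndexHeaderSuffix lets a caller that only knows one half of the file's identity validate just that half; the SimpleText .si reader above, for example, verifies only the ID. A sketch of the decomposed sequence, equivalent to a single checkIndexHeader call:

    int version = CodecUtil.checkHeader(in, codec, minVersion, maxVersion);
    byte[] id = CodecUtil.checkIndexHeaderID(in, expectedID);            // 16-byte ID
    String suffix = CodecUtil.checkIndexHeaderSuffix(in, expectedSuffix);
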
@@ -41,10 +41,11 @@ public abstract class SegmentInfoFormat {
    * Read {@link SegmentInfo} data from a directory.
    * @param directory directory to read from
    * @param segmentName name of the segment to read
+   * @param segmentID expected identifier for the segment
    * @return infos instance to be populated with data
    * @throws IOException If an I/O error occurs
    */
-  public abstract SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException;
+  public abstract SegmentInfo read(Directory directory, String segmentName, byte segmentID[], IOContext context) throws IOException;
 
   /**
    * Write {@link SegmentInfo} data.
@@ -128,11 +128,11 @@ public final class BlockTreeTermsReader extends FieldsProducer {
     String termsName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_EXTENSION);
     try {
       termsIn = state.directory.openInput(termsName, state.context);
-      version = CodecUtil.checkSegmentHeader(termsIn, TERMS_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      version = CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
 
       String indexName = IndexFileNames.segmentFileName(segment, state.segmentSuffix, TERMS_INDEX_EXTENSION);
       indexIn = state.directory.openInput(indexName, state.context);
-      CodecUtil.checkSegmentHeader(indexIn, TERMS_INDEX_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.checkIndexHeader(indexIn, TERMS_INDEX_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix);
       CodecUtil.checksumEntireFile(indexIn);
 
       // Have PostingsReader init itself
@@ -279,12 +279,12 @@ public final class BlockTreeTermsWriter extends FieldsConsumer {
     boolean success = false;
     IndexOutput indexOut = null;
     try {
-      CodecUtil.writeSegmentHeader(termsOut, BlockTreeTermsReader.TERMS_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT,
+      CodecUtil.writeIndexHeader(termsOut, BlockTreeTermsReader.TERMS_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT,
                                  state.segmentInfo.getId(), state.segmentSuffix);
 
       final String indexName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, BlockTreeTermsReader.TERMS_INDEX_EXTENSION);
       indexOut = state.directory.createOutput(indexName, state.context);
-      CodecUtil.writeSegmentHeader(indexOut, BlockTreeTermsReader.TERMS_INDEX_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT,
+      CodecUtil.writeIndexHeader(indexOut, BlockTreeTermsReader.TERMS_INDEX_CODEC_NAME, BlockTreeTermsReader.VERSION_CURRENT,
                                  state.segmentInfo.getId(), state.segmentSuffix);
 
       postingsWriter.init(termsOut, state); // have consumer write its format/header
@@ -64,7 +64,7 @@ public class CompressingStoredFieldsFormat extends StoredFieldsFormat {
    * <p>
    * <code>formatName</code> is the name of the format. This name will be used
    * in the file formats to perform
-   * {@link CodecUtil#checkSegmentHeader codec header checks}.
+   * {@link CodecUtil#checkIndexHeader codec header checks}.
    * <p>
    * <code>segmentSuffix</code> is the segment suffix. This suffix is added to
    * the result file name only if it's not the empty string.
@@ -118,8 +118,8 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
       Throwable priorE = null;
       try {
         final String codecNameIdx = formatName + CODEC_SFX_IDX;
-        version = CodecUtil.checkSegmentHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
-        assert CodecUtil.segmentHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
+        version = CodecUtil.checkIndexHeader(indexStream, codecNameIdx, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
+        assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
         indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
         maxPointer = indexStream.readVLong();
       } catch (Throwable exception) {
@@ -141,11 +141,11 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
         throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer=" + maxPointer + ", length=" + fieldsStream.length(), fieldsStream);
       }
       final String codecNameDat = formatName + CODEC_SFX_DAT;
-      final int fieldsVersion = CodecUtil.checkSegmentHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
+      final int fieldsVersion = CodecUtil.checkIndexHeader(fieldsStream, codecNameDat, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
       if (version != fieldsVersion) {
         throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion, fieldsStream);
       }
-      assert CodecUtil.segmentHeaderLength(codecNameDat, segmentSuffix) == fieldsStream.getFilePointer();
+      assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == fieldsStream.getFilePointer();
 
       chunkSize = fieldsStream.readVInt();
       packedIntsVersion = fieldsStream.readVInt();
@@ -118,10 +118,10 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
 
       final String codecNameIdx = formatName + CODEC_SFX_IDX;
       final String codecNameDat = formatName + CODEC_SFX_DAT;
-      CodecUtil.writeSegmentHeader(indexStream, codecNameIdx, VERSION_CURRENT, si.getId(), segmentSuffix);
-      CodecUtil.writeSegmentHeader(fieldsStream, codecNameDat, VERSION_CURRENT, si.getId(), segmentSuffix);
-      assert CodecUtil.segmentHeaderLength(codecNameDat, segmentSuffix) == fieldsStream.getFilePointer();
-      assert CodecUtil.segmentHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
+      CodecUtil.writeIndexHeader(indexStream, codecNameIdx, VERSION_CURRENT, si.getId(), segmentSuffix);
+      CodecUtil.writeIndexHeader(fieldsStream, codecNameDat, VERSION_CURRENT, si.getId(), segmentSuffix);
+      assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == fieldsStream.getFilePointer();
+      assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
 
       indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
       indexStream = null;
@@ -46,7 +46,7 @@ public class CompressingTermVectorsFormat extends TermVectorsFormat {
    * <p>
    * <code>formatName</code> is the name of the format. This name will be used
    * in the file formats to perform
-   * {@link CodecUtil#checkSegmentHeader codec header checks}.
+   * {@link CodecUtil#checkIndexHeader codec header checks}.
    * <p>
    * The <code>compressionMode</code> parameter allows you to choose between
    * compression algorithms that have various compression and decompression
@@ -114,8 +114,8 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implements Closeable {
       Throwable priorE = null;
       try {
         final String codecNameIdx = formatName + CODEC_SFX_IDX;
-        version = CodecUtil.checkSegmentHeader(input, codecNameIdx, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
-        assert CodecUtil.segmentHeaderLength(codecNameIdx, segmentSuffix) == input.getFilePointer();
+        version = CodecUtil.checkIndexHeader(input, codecNameIdx, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
+        assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == input.getFilePointer();
         indexReader = new CompressingStoredFieldsIndexReader(input, si);
         input.readVLong(); // the end of the data file
       } catch (Throwable exception) {
@@ -133,11 +133,11 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implements Closeable {
       final String vectorsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION);
       vectorsStream = d.openInput(vectorsStreamFN, context);
       final String codecNameDat = formatName + CODEC_SFX_DAT;
-      int version2 = CodecUtil.checkSegmentHeader(vectorsStream, codecNameDat, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
+      int version2 = CodecUtil.checkIndexHeader(vectorsStream, codecNameDat, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
       if (version != version2) {
         throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + version2, vectorsStream);
       }
-      assert CodecUtil.segmentHeaderLength(codecNameDat, segmentSuffix) == vectorsStream.getFilePointer();
+      assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == vectorsStream.getFilePointer();
 
       long pos = vectorsStream.getFilePointer();
       // NOTE: data file is too costly to verify checksum against all the bytes on open,
@@ -231,10 +231,10 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
 
       final String codecNameIdx = formatName + CODEC_SFX_IDX;
       final String codecNameDat = formatName + CODEC_SFX_DAT;
-      CodecUtil.writeSegmentHeader(indexStream, codecNameIdx, VERSION_CURRENT, si.getId(), segmentSuffix);
-      CodecUtil.writeSegmentHeader(vectorsStream, codecNameDat, VERSION_CURRENT, si.getId(), segmentSuffix);
-      assert CodecUtil.segmentHeaderLength(codecNameDat, segmentSuffix) == vectorsStream.getFilePointer();
-      assert CodecUtil.segmentHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
+      CodecUtil.writeIndexHeader(indexStream, codecNameIdx, VERSION_CURRENT, si.getId(), segmentSuffix);
+      CodecUtil.writeIndexHeader(vectorsStream, codecNameDat, VERSION_CURRENT, si.getId(), segmentSuffix);
+      assert CodecUtil.indexHeaderLength(codecNameDat, segmentSuffix) == vectorsStream.getFilePointer();
+      assert CodecUtil.indexHeaderLength(codecNameIdx, segmentSuffix) == indexStream.getFilePointer();
 
       indexWriter = new CompressingStoredFieldsIndexWriter(indexStream);
       indexStream = null;
@@ -46,7 +46,7 @@ import org.apache.lucene.store.IndexOutput;
  * <li>Compound (.cfs) --> Header, FileData <sup>FileCount</sup>, Footer</li>
  * <li>Compound Entry Table (.cfe) --> Header, FileCount, <FileName,
 *     DataOffset, DataLength> <sup>FileCount</sup></li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>FileCount --> {@link DataOutput#writeVInt VInt}</li>
  * <li>DataOffset,DataLength,Checksum --> {@link DataOutput#writeLong UInt64}</li>
  * <li>FileName --> {@link DataOutput#writeString String}</li>
@@ -79,8 +79,8 @@ public final class Lucene50CompoundFormat extends CompoundFormat {
 
     try (IndexOutput data = dir.createOutput(dataFile, context);
          IndexOutput entries = dir.createOutput(entriesFile, context)) {
-      CodecUtil.writeSegmentHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), "");
-      CodecUtil.writeSegmentHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");
+      CodecUtil.writeIndexHeader(data, DATA_CODEC, VERSION_CURRENT, si.getId(), "");
+      CodecUtil.writeIndexHeader(entries, ENTRY_CODEC, VERSION_CURRENT, si.getId(), "");
 
       // write number of files
       entries.writeVInt(files.size());
@@ -71,7 +71,7 @@ final class Lucene50CompoundReader extends BaseDirectory {
     boolean success = false;
     handle = directory.openInput(dataFileName, context);
     try {
-      CodecUtil.checkSegmentHeader(handle, Lucene50CompoundFormat.DATA_CODEC, version, version, si.getId(), "");
+      CodecUtil.checkIndexHeader(handle, Lucene50CompoundFormat.DATA_CODEC, version, version, si.getId(), "");
 
       // NOTE: data file is too costly to verify checksum against all the bytes on open,
       // but for now we at least verify proper structure of the checksum footer: which looks
@@ -93,7 +93,7 @@ final class Lucene50CompoundReader extends BaseDirectory {
     try (ChecksumIndexInput entriesStream = dir.openChecksumInput(entriesFileName, IOContext.READONCE)) {
       Throwable priorE = null;
      try {
-        version = CodecUtil.checkSegmentHeader(entriesStream, Lucene50CompoundFormat.ENTRY_CODEC,
+        version = CodecUtil.checkIndexHeader(entriesStream, Lucene50CompoundFormat.ENTRY_CODEC,
                                                Lucene50CompoundFormat.VERSION_START,
                                                Lucene50CompoundFormat.VERSION_CURRENT, segmentID, "");
         final int numEntries = entriesStream.readVInt();
@@ -101,10 +101,10 @@ class Lucene50DocValuesConsumer extends DocValuesConsumer implements Closeable {
     try {
       String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
       data = state.directory.createOutput(dataName, state.context);
-      CodecUtil.writeSegmentHeader(data, dataCodec, Lucene50DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(data, dataCodec, Lucene50DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
       meta = state.directory.createOutput(metaName, state.context);
-      CodecUtil.writeSegmentHeader(meta, metaCodec, Lucene50DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(meta, metaCodec, Lucene50DocValuesFormat.VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       maxDoc = state.segmentInfo.getDocCount();
       success = true;
     } finally {
@@ -130,7 +130,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
     try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
       Throwable priorE = null;
      try {
-        version = CodecUtil.checkSegmentHeader(in, metaCodec,
+        version = CodecUtil.checkIndexHeader(in, metaCodec,
                                                Lucene50DocValuesFormat.VERSION_START,
                                                Lucene50DocValuesFormat.VERSION_CURRENT,
                                                state.segmentInfo.getId(),
@@ -148,7 +148,7 @@ class Lucene50DocValuesProducer extends DocValuesProducer implements Closeable {
     this.data = state.directory.openInput(dataName, state.context);
     boolean success = false;
     try {
-      final int version2 = CodecUtil.checkSegmentHeader(data, dataCodec,
+      final int version2 = CodecUtil.checkIndexHeader(data, dataCodec,
                                                         Lucene50DocValuesFormat.VERSION_START,
                                                         Lucene50DocValuesFormat.VERSION_CURRENT,
                                                         state.segmentInfo.getId(),
@@ -46,7 +46,7 @@ import org.apache.lucene.store.IndexOutput;
  * FieldBits,DocValuesBits,DocValuesGen,Attributes> <sup>FieldsCount</sup>,Footer</p>
  * <p>Data types:
  * <ul>
- * <li>Header --> {@link CodecUtil#checkSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#checkIndexHeader IndexHeader}</li>
  * <li>FieldsCount --> {@link DataOutput#writeVInt VInt}</li>
  * <li>FieldName --> {@link DataOutput#writeString String}</li>
  * <li>FieldBits, DocValuesBits --> {@link DataOutput#writeByte Byte}</li>
@@ -114,7 +114,7 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
     Throwable priorE = null;
     FieldInfo infos[] = null;
     try {
-      CodecUtil.checkSegmentHeader(input, Lucene50FieldInfosFormat.CODEC_NAME,
+      CodecUtil.checkIndexHeader(input, Lucene50FieldInfosFormat.CODEC_NAME,
                                    Lucene50FieldInfosFormat.FORMAT_START,
                                    Lucene50FieldInfosFormat.FORMAT_CURRENT,
                                    segmentInfo.getId(), segmentSuffix);
@@ -190,7 +190,7 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat {
   public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
     final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene50FieldInfosFormat.EXTENSION);
     try (IndexOutput output = directory.createOutput(fileName, context)) {
-      CodecUtil.writeSegmentHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
+      CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
       output.writeVInt(infos.size());
       for (FieldInfo fi : infos) {
         fi.checkConsistency();
@@ -43,8 +43,7 @@ import org.apache.lucene.util.MutableBits;
  * files.</p>
  * <p>Deletions (.liv) --> SegmentHeader,Generation,Bits</p>
  * <ul>
- * <li>SegmentHeader --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
- * <li>Generation --> {@link DataOutput#writeLong Int64}
+ * <li>SegmentHeader --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>Bits --> <{@link DataOutput#writeLong Int64}> <sup>LongCount</sup></li>
  * </ul>
  */
@@ -85,11 +84,8 @@ public final class Lucene50LiveDocsFormat extends LiveDocsFormat {
     try (ChecksumIndexInput input = dir.openChecksumInput(name, context)) {
       Throwable priorE = null;
      try {
-        CodecUtil.checkSegmentHeader(input, CODEC_NAME, VERSION_START, VERSION_CURRENT, info.info.getId(), "");
-        long filegen = input.readLong();
-        if (gen != filegen) {
-          throw new CorruptIndexException("file mismatch, expected generation=" + gen + ", got=" + filegen, input);
-        }
+        CodecUtil.checkIndexHeader(input, CODEC_NAME, VERSION_START, VERSION_CURRENT,
+                                   info.info.getId(), Long.toString(gen, Character.MAX_RADIX));
         long data[] = new long[FixedBitSet.bits2words(length)];
         for (int i = 0; i < data.length; i++) {
           data[i] = input.readLong();
@@ -120,8 +116,7 @@ public final class Lucene50LiveDocsFormat extends LiveDocsFormat {
     }
     long data[] = fbs.getBits();
     try (IndexOutput output = dir.createOutput(name, context)) {
-      CodecUtil.writeSegmentHeader(output, CODEC_NAME, VERSION_CURRENT, info.info.getId(), "");
-      output.writeLong(gen);
+      CodecUtil.writeIndexHeader(output, CODEC_NAME, VERSION_CURRENT, info.info.getId(), Long.toString(gen, Character.MAX_RADIX));
      for (int i = 0; i < data.length; i++) {
        output.writeLong(data[i]);
      }
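
Folding the live-docs generation into the header suffix (instead of a separate writeLong plus a hand-rolled comparison) is one instance of the "consistency of encoding elsewhere" in the commit title: the mismatch check now falls out of checkIndexHeader. The suffix is simply the generation rendered in base 36, e.g.:

    long gen = 123;
    String suffix = Long.toString(gen, Character.MAX_RADIX);  // base 36 -> "3f"
    // Written via writeIndexHeader(output, CODEC_NAME, VERSION_CURRENT, id, suffix);
    // a reader expecting a different generation now gets
    // "file mismatch, expected suffix=..." rather than a custom generation error.
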
@@ -62,10 +62,10 @@ class Lucene50NormsConsumer extends NormsConsumer {
     try {
       String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
       data = state.directory.createOutput(dataName, state.context);
-      CodecUtil.writeSegmentHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(data, dataCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
       meta = state.directory.createOutput(metaName, state.context);
-      CodecUtil.writeSegmentHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      CodecUtil.writeIndexHeader(meta, metaCodec, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       success = true;
     } finally {
       if (!success) {
@@ -93,7 +93,7 @@ class Lucene50NormsProducer extends NormsProducer {
     try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) {
       Throwable priorE = null;
      try {
-        version = CodecUtil.checkSegmentHeader(in, metaCodec, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+        version = CodecUtil.checkIndexHeader(in, metaCodec, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
         readFields(in, state.fieldInfos);
       } catch (Throwable exception) {
         priorE = exception;
@@ -106,7 +106,7 @@ class Lucene50NormsProducer extends NormsProducer {
     this.data = state.directory.openInput(dataName, state.context);
     boolean success = false;
     try {
-      final int version2 = CodecUtil.checkSegmentHeader(data, dataCodec, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
+      final int version2 = CodecUtil.checkIndexHeader(data, dataCodec, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
       if (version != version2) {
         throw new CorruptIndexException("Format versions mismatch: meta=" + version + ",data=" + version2, data);
       }
@@ -128,14 +128,14 @@ import org.apache.lucene.util.packed.PackedInts;
  * <li>PostingsHeader --> Header, PackedBlockSize</li>
  * <li>TermMetadata --> (DocFPDelta|SingletonDocID), PosFPDelta?, PosVIntBlockFPDelta?, PayFPDelta?,
 *     SkipFPDelta?</li>
- * <li>Header, --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header, --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>PackedBlockSize, SingletonDocID --> {@link DataOutput#writeVInt VInt}</li>
  * <li>DocFPDelta, PosFPDelta, PayFPDelta, PosVIntBlockFPDelta, SkipFPDelta --> {@link DataOutput#writeVLong VLong}</li>
  * <li>Footer --> {@link CodecUtil#writeFooter CodecFooter}</li>
  * </ul>
  * <p>Notes:</p>
  * <ul>
- * <li>Header is a {@link CodecUtil#writeSegmentHeader SegmentHeader} storing the version information
+ * <li>Header is a {@link CodecUtil#writeIndexHeader IndexHeader} storing the version information
 *     for the postings.</li>
  * <li>PackedBlockSize is the fixed block size for packed blocks. In packed block, bit width is
 *     determined by the largest integer. Smaller block size result in smaller variance among width
@@ -191,7 +191,7 @@ import org.apache.lucene.util.packed.PackedInts;
  *
  * <ul>
  * <li>docFile(.doc) --> Header, <TermFreqs, SkipData?><sup>TermCount</sup>, Footer</li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>TermFreqs --> <PackedBlock> <sup>PackedDocBlockNum</sup>,
 *     VIntBlock? </li>
  * <li>PackedBlock --> PackedDocDeltaBlock, PackedFreqBlock?
@@ -275,7 +275,7 @@ import org.apache.lucene.util.packed.PackedInts;
  * sometimes stores part of payloads and offsets for speedup.</p>
  * <ul>
  * <li>PosFile(.pos) --> Header, <TermPositions> <sup>TermCount</sup>, Footer</li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>TermPositions --> <PackedPosDeltaBlock> <sup>PackedPosBlockNum</sup>,
 *     VIntBlock? </li>
  * <li>VIntBlock --> <PositionDelta[, PayloadLength?], PayloadData?,
@@ -328,7 +328,7 @@ import org.apache.lucene.util.packed.PackedInts;
  * Some payloads and offsets will be separated out into .pos file, for performance reasons.</p>
  * <ul>
  * <li>PayFile(.pay): --> Header, <TermPayloads, TermOffsets?> <sup>TermCount</sup>, Footer</li>
- * <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
+ * <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
  * <li>TermPayloads --> <PackedPayLengthBlock, SumPayLength, PayData> <sup>PackedPayBlockNum</sup>
  * <li>TermOffsets --> <PackedOffsetStartDeltaBlock, PackedOffsetLengthBlock> <sup>PackedPayBlockNum</sup>
  * <li>PackedPayLengthBlock, PackedOffsetStartDeltaBlock, PackedOffsetLengthBlock --> {@link PackedInts PackedInts}</li>
@ -83,20 +83,20 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
String docName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.DOC_EXTENSION);
try {
docIn = state.directory.openInput(docName, state.context);
version = CodecUtil.checkSegmentHeader(docIn, DOC_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
version = CodecUtil.checkIndexHeader(docIn, DOC_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
forUtil = new ForUtil(docIn);
CodecUtil.retrieveChecksum(docIn);

if (state.fieldInfos.hasProx()) {
String proxName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.POS_EXTENSION);
posIn = state.directory.openInput(proxName, state.context);
CodecUtil.checkSegmentHeader(posIn, POS_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.checkIndexHeader(posIn, POS_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.retrieveChecksum(posIn);

if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) {
String payName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.PAY_EXTENSION);
payIn = state.directory.openInput(payName, state.context);
CodecUtil.checkSegmentHeader(payIn, PAY_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.checkIndexHeader(payIn, PAY_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.retrieveChecksum(payIn);
}
}
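The read-side pattern repeated above is always the same: open the input, then let checkIndexHeader validate codec name, version range, segment id, and suffix in one shot. A minimal self-contained sketch of that pattern, assuming a hypothetical helper class and caller-supplied expectations (none of these names are in the patch):

```java
import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

// Hypothetical helper: verifies a per-segment file really belongs to the
// segment whose 16-byte id the caller expects.
final class SegmentFileChecker {
  static void verify(Directory dir, String fileName, String codecName,
                     int minVersion, int maxVersion,
                     byte[] expectedSegmentID, String expectedSuffix) throws IOException {
    try (ChecksumIndexInput in = dir.openChecksumInput(fileName, IOContext.READONCE)) {
      // Throws CorruptIndexException if codec name, version range, segment id,
      // or suffix in the index header do not match what the caller expects:
      CodecUtil.checkIndexHeader(in, codecName, minVersion, maxVersion,
                                 expectedSegmentID, expectedSuffix);
      // ... read the payload here; a full check would end with CodecUtil.checkFooter(in)
    }
  }
}
```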
@ -115,7 +115,7 @@ public final class Lucene50PostingsReader extends PostingsReaderBase {
@Override
public void init(IndexInput termsIn, SegmentReadState state) throws IOException {
// Make sure we are talking to the matching postings writer
CodecUtil.checkSegmentHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
final int indexBlockSize = termsIn.readVInt();
if (indexBlockSize != BLOCK_SIZE) {
throw new IllegalStateException("index-time BLOCK_SIZE (" + indexBlockSize + ") != read-time BLOCK_SIZE (" + BLOCK_SIZE + ")");
@ -107,14 +107,14 @@ public final class Lucene50PostingsWriter extends PushPostingsWriterBase {
IndexOutput payOut = null;
boolean success = false;
try {
CodecUtil.writeSegmentHeader(docOut, DOC_CODEC, VERSION_CURRENT,
CodecUtil.writeIndexHeader(docOut, DOC_CODEC, VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
forUtil = new ForUtil(acceptableOverheadRatio, docOut);
if (state.fieldInfos.hasProx()) {
posDeltaBuffer = new int[MAX_DATA_SIZE];
String posFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.POS_EXTENSION);
posOut = state.directory.createOutput(posFileName, state.context);
CodecUtil.writeSegmentHeader(posOut, POS_CODEC, VERSION_CURRENT,
CodecUtil.writeIndexHeader(posOut, POS_CODEC, VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);

if (state.fieldInfos.hasPayloads()) {
@ -136,7 +136,7 @@ public final class Lucene50PostingsWriter extends PushPostingsWriterBase {
if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) {
String payFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.PAY_EXTENSION);
payOut = state.directory.createOutput(payFileName, state.context);
CodecUtil.writeSegmentHeader(payOut, PAY_CODEC, VERSION_CURRENT,
CodecUtil.writeIndexHeader(payOut, PAY_CODEC, VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
}
} else {
@ -176,7 +176,7 @@ public final class Lucene50PostingsWriter extends PushPostingsWriterBase {

@Override
public void init(IndexOutput termsOut, SegmentWriteState state) throws IOException {
CodecUtil.writeSegmentHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.writeIndexHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
termsOut.writeVInt(BLOCK_SIZE);
}

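The write side mirrors the read side exactly. A hedged, self-contained sketch of the framing every per-segment file gets after this change (the codec name, file name, and demo payload are made up):

```java
import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

// Sketch only: IndexHeader (codec + version + segment id + suffix) up front,
// codec footer with checksum at the end, payload in between.
final class FramedFileWriterSketch {
  static void writeDemo(Directory dir, String fileName,
                        byte[] segmentID, String segmentSuffix) throws IOException {
    try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) {
      CodecUtil.writeIndexHeader(out, "DemoCodec", 0, segmentID, segmentSuffix);
      out.writeVInt(42);          // file payload goes here
      CodecUtil.writeFooter(out); // checksummed footer closes the frame
    }
  }
}
```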
@ -48,7 +48,7 @@ import org.apache.lucene.util.Version;
* Data types:
* <p>
* <ul>
* <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>SegSize --> {@link DataOutput#writeInt Int32}</li>
* <li>SegVersion --> {@link DataOutput#writeString String}</li>
* <li>Files --> {@link DataOutput#writeStringSet Set<String>}</li>
@ -83,21 +83,16 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
}

@Override
public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
public SegmentInfo read(Directory dir, String segment, byte[] segmentID, IOContext context) throws IOException {
final String fileName = IndexFileNames.segmentFileName(segment, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
try (ChecksumIndexInput input = dir.openChecksumInput(fileName, context)) {
Throwable priorE = null;
SegmentInfo si = null;
try {
CodecUtil.checkHeader(input, Lucene50SegmentInfoFormat.CODEC_NAME,
Lucene50SegmentInfoFormat.VERSION_START,
Lucene50SegmentInfoFormat.VERSION_CURRENT);
byte id[] = new byte[StringHelper.ID_LENGTH];
input.readBytes(id, 0, id.length);
String suffix = input.readString();
if (!suffix.isEmpty()) {
throw new CorruptIndexException("invalid codec header: got unexpected suffix: " + suffix, input);
}
CodecUtil.checkIndexHeader(input, Lucene50SegmentInfoFormat.CODEC_NAME,
Lucene50SegmentInfoFormat.VERSION_START,
Lucene50SegmentInfoFormat.VERSION_CURRENT,
segmentID, "");
final Version version = Version.fromBits(input.readInt(), input.readInt(), input.readInt());

final int docCount = input.readInt();
@ -108,7 +103,7 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
final Map<String,String> diagnostics = input.readStringStringMap();
final Set<String> files = input.readStringSet();

si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, id);
si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null, diagnostics, segmentID);
si.setFiles(files);
} catch (Throwable exception) {
priorE = exception;
@ -126,8 +121,7 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {

boolean success = false;
try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
// NOTE: we encode ID in the segment header, for format consistency with all other per-segment files
CodecUtil.writeSegmentHeader(output,
CodecUtil.writeIndexHeader(output,
Lucene50SegmentInfoFormat.CODEC_NAME,
Lucene50SegmentInfoFormat.VERSION_CURRENT,
si.getId(),
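This hunk is the "detect mismatched .si" part of the commit message: read() now receives the segment id recorded in segments_N and hands it to checkIndexHeader, so a stale or copied-over .si fails instead of being silently accepted. A hypothetical caller, assuming the read signature introduced here and an expectedID taken from the commit (segment name and helper are illustrative):

```java
import java.io.IOException;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;

// Sketch: if _1.si was overwritten with a copy of _0.si, the id embedded in the
// file no longer matches what segments_N expects for "_1", and read() throws.
final class MismatchedSiDemo {
  static SegmentInfo readOrFail(Directory dir, String segName, byte[] expectedID) throws IOException {
    try {
      return Codec.getDefault().segmentInfoFormat().read(dir, segName, expectedID, IOContext.READ);
    } catch (CorruptIndexException e) {
      System.err.println("mismatched or damaged " + segName + ".si: " + e.getMessage());
      throw e;
    }
  }
}
```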
@ -52,7 +52,7 @@ import org.apache.lucene.util.packed.PackedInts;
* <p>Here is a more detailed description of the field data file format:</p>
* <ul>
* <li>FieldData (.fdt) --> <Header>, PackedIntsVersion, <Chunk><sup>ChunkCount</sup></li>
* <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>PackedIntsVersion --> {@link PackedInts#VERSION_CURRENT} as a {@link DataOutput#writeVInt VInt}</li>
* <li>ChunkCount is not known in advance and is the number of chunks necessary to store all documents of the segment</li>
* <li>Chunk --> DocBase, ChunkDocs, DocFieldCounts, DocLengths, <CompressedDocs></li>
@ -104,7 +104,7 @@ import org.apache.lucene.util.packed.PackedInts;
* <p>A fields index file (extension <tt>.fdx</tt>).</p>
* <ul>
* <li>FieldsIndex (.fdx) --> <Header>, <ChunkIndex></li>
* <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>ChunkIndex: See {@link CompressingStoredFieldsIndexWriter}</li>
* </ul>
* </li>
@ -59,7 +59,7 @@ import org.apache.lucene.util.packed.PackedInts;
* <p>Here is a more detailed description of the field data file format:</p>
* <ul>
* <li>VectorData (.tvd) --> <Header>, PackedIntsVersion, ChunkSize, <Chunk><sup>ChunkCount</sup>, Footer</li>
* <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>PackedIntsVersion --> {@link PackedInts#VERSION_CURRENT} as a {@link DataOutput#writeVInt VInt}</li>
* <li>ChunkSize is the number of bytes of terms to accumulate before flushing, as a {@link DataOutput#writeVInt VInt}</li>
* <li>ChunkCount is not known in advance and is the number of chunks necessary to store all documents of the segment</li>
@ -113,7 +113,7 @@ import org.apache.lucene.util.packed.PackedInts;
* <p>An index file (extension <tt>.tvx</tt>).</p>
* <ul>
* <li>VectorIndex (.tvx) --> <Header>, <ChunkIndex>, Footer</li>
* <li>Header --> {@link CodecUtil#writeSegmentHeader SegmentHeader}</li>
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>ChunkIndex: See {@link CompressingStoredFieldsIndexWriter}</li>
* <li>Footer --> {@link CodecUtil#writeFooter CodecFooter}</li>
* </ul>
@ -451,7 +451,7 @@ public class CheckIndex implements Closeable {
public Status checkIndex(List<String> onlySegments) throws IOException {
ensureOpen();
NumberFormat nf = NumberFormat.getInstance(Locale.ROOT);
SegmentInfos sis = new SegmentInfos();
SegmentInfos sis = null;
Status result = new Status();
result.dir = dir;
String[] files = dir.listAll();
@ -462,7 +462,7 @@ public class CheckIndex implements Closeable {
try {
// Do not use SegmentInfos.read(Directory) since the spooky
// retrying it does is not necessary here (we hold the write lock):
sis.read(dir, lastSegmentsFile);
sis = SegmentInfos.readCommit(dir, lastSegmentsFile);
} catch (Throwable t) {
if (failFast) {
IOUtils.reThrow(t);
@ -225,8 +225,7 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {

List<IndexCommit> commits = new ArrayList<>();

SegmentInfos latest = new SegmentInfos();
latest.read(dir);
SegmentInfos latest = SegmentInfos.readLatestCommit(dir);
final long currentGen = latest.getGeneration();

commits.add(new StandardDirectoryReader.ReaderCommit(latest, dir));
@ -239,11 +238,11 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
!fileName.equals(IndexFileNames.OLD_SEGMENTS_GEN) &&
SegmentInfos.generationFromSegmentsFileName(fileName) < currentGen) {

SegmentInfos sis = new SegmentInfos();
SegmentInfos sis = null;
try {
// IOException allowed to throw there, in case
// segments_N is corrupt
sis.read(dir, fileName);
sis = SegmentInfos.readCommit(dir, fileName);
} catch (FileNotFoundException | NoSuchFileException fnfe) {
// LUCENE-948: on NFS (and maybe others), if
// you have writers switching back and forth
@ -252,7 +251,6 @@ public abstract class DirectoryReader extends BaseCompositeReader<LeafReader> {
// file segments_X exists when in fact it
// doesn't. So, we catch this and handle it
// as if the file does not exist
sis = null;
}

if (sis != null)
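For orientation, this is the plumbing behind the public commit-listing API. A small usage sketch, assuming an existing index in dir; it is not part of the patch:

```java
import java.io.IOException;
import java.util.List;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.store.Directory;

// listCommits() walks every surviving segments_N via SegmentInfos.readCommit,
// exactly as the loop above does.
final class ListCommitsDemo {
  static void printCommits(Directory dir) throws IOException {
    List<IndexCommit> commits = DirectoryReader.listCommits(dir);
    for (IndexCommit commit : commits) {
      System.out.println(commit.getSegmentsFileName() + " gen=" + commit.getGeneration());
    }
  }
}
```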
@ -165,9 +165,9 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "init: load commit \"" + fileName + "\"");
}
SegmentInfos sis = new SegmentInfos();
SegmentInfos sis = null;
try {
sis.read(directory, fileName);
sis = SegmentInfos.readCommit(directory, fileName);
} catch (FileNotFoundException | NoSuchFileException e) {
// LUCENE-948: on NFS (and maybe others), if
// you have writers switching back and forth
@ -179,7 +179,6 @@ final class IndexFileDeleter implements Closeable {
if (infoStream.isEnabled("IFD")) {
infoStream.message("IFD", "init: hit FileNotFoundException when loading commit \"" + fileName + "\"; skipping this commit point");
}
sis = null;
} catch (IOException e) {
if (SegmentInfos.generationFromSegmentsFileName(fileName) <= currentGen && directory.fileLength(fileName) > 0) {
throw e;
@ -187,7 +186,6 @@ final class IndexFileDeleter implements Closeable {
// Most likely we are opening an index that
// has an aborted "future" commit, so suppress
// exc in this case
sis = null;
}
}
if (sis != null) {
@ -215,9 +213,9 @@ final class IndexFileDeleter implements Closeable {
// listing was stale (eg when index accessed via NFS
// client with stale directory listing cache). So we
// try now to explicitly open this commit point:
SegmentInfos sis = new SegmentInfos();
SegmentInfos sis = null;
try {
sis.read(directory, currentSegmentsFile);
sis = SegmentInfos.readCommit(directory, currentSegmentsFile);
} catch (IOException e) {
throw new CorruptIndexException("unable to read current segments_N file", currentSegmentsFile, e);
}
@ -773,7 +773,6 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {

// If index is too old, reading the segments will throw
// IndexFormatTooOldException.
segmentInfos = new SegmentInfos();

boolean initialIndexExists = true;

@ -782,13 +781,17 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// against an index that's currently open for
// searching. In this case we write the next
// segments_N file with no segments:
SegmentInfos sis = null;
try {
segmentInfos.read(directory);
segmentInfos.clear();
sis = SegmentInfos.readLatestCommit(directory);
sis.clear();
} catch (IOException e) {
// Likely this means it's a fresh directory
initialIndexExists = false;
sis = new SegmentInfos();
}

segmentInfos = sis;

// Record that we have a change (zero out all
// segments) pending:
@ -802,7 +805,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {

// Do not use SegmentInfos.read(Directory) since the spooky
// retrying it does is not necessary here (we hold the write lock):
segmentInfos.read(directory, lastSegmentsFile);
segmentInfos = SegmentInfos.readCommit(directory, lastSegmentsFile);

IndexCommit commit = config.getIndexCommit();
if (commit != null) {
@ -813,8 +816,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
// points.
if (commit.getDirectory() != directory)
throw new IllegalArgumentException("IndexCommit's directory doesn't match my directory");
SegmentInfos oldInfos = new SegmentInfos();
oldInfos.read(directory, commit.getSegmentsFileName());
SegmentInfos oldInfos = SegmentInfos.readCommit(directory, commit.getSegmentsFileName());
segmentInfos.replace(oldInfos);
changed();
if (infoStream.isEnabled("IW")) {
@ -2401,8 +2403,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "addIndexes: process directory " + dir);
}
SegmentInfos sis = new SegmentInfos(); // read infos from dir
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir); // read infos from dir
totalDocCount += sis.totalDocCount();

for (SegmentCommitInfo info : sis) {
@ -67,8 +67,8 @@ import org.apache.lucene.util.StringHelper;
* Data types:
* <p>
* <ul>
* <li>Header --> {@link CodecUtil#writeHeader CodecHeader}</li>
* <li>GenHeader, NameCounter, SegCount, DeletionCount -->
* <li>Header --> {@link CodecUtil#writeIndexHeader IndexHeader}</li>
* <li>NameCounter, SegCount, DeletionCount -->
* {@link DataOutput#writeInt Int32}</li>
* <li>Generation, Version, DelGen, Checksum, FieldInfosGen, DocValuesGen -->
* {@link DataOutput#writeLong Int64}</li>
@ -155,8 +155,8 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
private byte[] id;

/** Sole constructor. Typically you call this and then
* use {@link #read(Directory) or
* #read(Directory,String)} to populate each {@link
* use {@link #readLatestCommit(Directory)} or
* {@link #readCommit(Directory,String)} to populate each {@link
* SegmentCommitInfo}. Alternatively, you can add/remove your
* own {@link SegmentCommitInfo}s. */
public SegmentInfos() {
@ -245,21 +245,14 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
throw new IllegalArgumentException("fileName \"" + fileName + "\" is not a segments file");
}
}

/**
* Get the next pending_segments_N filename that will be written.
*/
public String getNextPendingSegmentFileName() {
long nextGeneration;

/** return generation of the next pending_segments_N that will be written */
private long getNextPendingGeneration() {
if (generation == -1) {
nextGeneration = 1;
return 1;
} else {
nextGeneration = generation+1;
return generation+1;
}
return IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS,
"",
nextGeneration);
}
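The filename math the old method performed now lives at the call site. A tiny sketch of what it produces, assuming IndexFileNames.PENDING_SEGMENTS and fileNameFromGeneration are accessible to the caller (the generation value is made up):

```java
import org.apache.lucene.index.IndexFileNames;

public class PendingGenDemo {
  public static void main(String[] args) {
    long generation = 41;
    // generation -1 maps to 1, otherwise generation+1:
    long nextGeneration = (generation == -1) ? 1 : generation + 1;
    // The generation is rendered in Character.MAX_RADIX (base 36):
    String name = IndexFileNames.fileNameFromGeneration(
        IndexFileNames.PENDING_SEGMENTS, "", nextGeneration);
    System.out.println(name); // pending_segments_16 (42 in base 36)
  }
}
```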

/** Since Lucene 5.0, every commit (segments_N) writes a unique id. This will
@ -277,18 +270,10 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public final void read(Directory directory, String segmentFileName) throws IOException {
boolean success = false;
public static final SegmentInfos readCommit(Directory directory, String segmentFileName) throws IOException {

// Clear any previous segments:
this.clear();

generation = generationFromSegmentsFileName(segmentFileName);

lastGeneration = generation;

ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ);
try {
long generation = generationFromSegmentsFileName(segmentFileName);
try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READ)) {
// NOTE: as long as we want to throw indexformattooold (vs corruptindexexception), we need
// to read the magic ourselves.
int magic = input.readInt();
@ -297,17 +282,42 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
}
// 4.0+
int format = CodecUtil.checkHeaderNoMagic(input, "segments", VERSION_40, VERSION_50);
version = input.readLong();
counter = input.readInt();
// 5.0+
byte id[] = null;
if (format >= VERSION_50) {
id = new byte[StringHelper.ID_LENGTH];
input.readBytes(id, 0, id.length);
CodecUtil.checkIndexHeaderSuffix(input, Long.toString(generation, Character.MAX_RADIX));
}

SegmentInfos infos = new SegmentInfos();
infos.id = id;
infos.generation = generation;
infos.lastGeneration = generation;
infos.version = input.readLong();
infos.counter = input.readInt();
int numSegments = input.readInt();
if (numSegments < 0) {
throw new CorruptIndexException("invalid segment count: " + numSegments, input);
}
for (int seg = 0; seg < numSegments; seg++) {
String segName = input.readString();
final byte segmentID[];
if (format >= VERSION_50) {
byte hasID = input.readByte();
if (hasID == 1) {
segmentID = new byte[StringHelper.ID_LENGTH];
input.readBytes(segmentID, 0, segmentID.length);
} else if (hasID == 0) {
segmentID = null; // 4.x segment, doesn't have an ID
} else {
throw new CorruptIndexException("invalid hasID byte, got: " + hasID, input);
}
} else {
segmentID = null;
}
Codec codec = Codec.forName(input.readString());
//System.out.println("SIS.read seg=" + seg + " codec=" + codec);
SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, IOContext.READ);
SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ);
info.setCodec(codec);
long delGen = input.readLong();
int delCount = input.readInt();
@ -358,13 +368,9 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
siPerCommit.setDocValuesUpdatesFiles(dvUpdateFiles);
}
}
add(siPerCommit);
}
userData = input.readStringStringMap();
if (format >= VERSION_50) {
id = new byte[StringHelper.ID_LENGTH];
input.readBytes(id, 0, id.length);
infos.add(siPerCommit);
}
infos.userData = input.readStringStringMap();

if (format >= VERSION_48) {
CodecUtil.checkFooter(input);
@ -378,30 +384,17 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
CodecUtil.checkEOF(input);
}

success = true;
} finally {
if (!success) {
// Clear any segment infos we had loaded so we
// have a clean slate on retry:
this.clear();
IOUtils.closeWhileHandlingException(input);
} else {
input.close();
}
return infos;
}
}

/** Find the latest commit ({@code segments_N file}) and
* load all {@link SegmentCommitInfo}s. */
public final void read(Directory directory) throws IOException {
generation = lastGeneration = -1;

new FindSegmentsFile(directory) {

public static final SegmentInfos readLatestCommit(Directory directory) throws IOException {
return new FindSegmentsFile<SegmentInfos>(directory) {
@Override
protected Object doBody(String segmentFileName) throws IOException {
read(directory, segmentFileName);
return null;
protected SegmentInfos doBody(String segmentFileName) throws IOException {
return readCommit(directory, segmentFileName);
}
}.run();
}
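This static factory replaces the mutable two-step pattern that the many test hunks below migrate away from. A minimal usage sketch, not part of the patch:

```java
import java.io.IOException;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;

// No more "new SegmentInfos(); infos.read(dir)" at call sites:
final class ReadLatestCommitDemo {
  static void dump(Directory dir) throws IOException {
    SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
    System.out.println("commit " + infos.getSegmentsFileName()
        + " holds " + infos.size() + " segments");
    for (SegmentCommitInfo info : infos) {
      System.out.println("  " + info.info.name + " docs=" + info.info.getDocCount());
    }
  }
}
```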
@ -412,27 +405,38 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo

private void write(Directory directory) throws IOException {

String segmentFileName = getNextPendingSegmentFileName();
long nextGeneration = getNextPendingGeneration();
String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS,
"",
nextGeneration);

// Always advance the generation on write:
if (generation == -1) {
generation = 1;
} else {
generation++;
}
generation = nextGeneration;

IndexOutput segnOutput = null;
boolean success = false;

try {
segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT);
CodecUtil.writeHeader(segnOutput, "segments", VERSION_50);
CodecUtil.writeIndexHeader(segnOutput, "segments", VERSION_50,
StringHelper.randomId(), Long.toString(nextGeneration, Character.MAX_RADIX));
segnOutput.writeLong(version);
segnOutput.writeInt(counter); // write counter
segnOutput.writeInt(size()); // write infos
for (SegmentCommitInfo siPerCommit : this) {
SegmentInfo si = siPerCommit.info;
segnOutput.writeString(si.name);
byte segmentID[] = si.getId();
// TODO: remove this in lucene 6, we don't need to include 4.x segments in commits anymore
if (segmentID == null) {
segnOutput.writeByte((byte)0);
} else {
if (segmentID.length != StringHelper.ID_LENGTH) {
throw new IllegalStateException("cannot write segment: invalid id segment=" + si.name + " id=" + StringHelper.idToString(segmentID));
}
segnOutput.writeByte((byte)1);
segnOutput.writeBytes(segmentID, segmentID.length);
}
segnOutput.writeString(si.getCodec().getName());
segnOutput.writeLong(siPerCommit.getDelGen());
int delCount = siPerCommit.getDelCount();
@ -452,8 +456,6 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
assert si.dir == directory;
}
segnOutput.writeStringStringMap(userData);
byte[] id = StringHelper.randomId();
segnOutput.writeBytes(id, 0, id.length);
CodecUtil.writeFooter(segnOutput);
segnOutput.close();
directory.sync(Collections.singleton(segmentFileName));
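The per-segment id framing written above is a simple tagged encoding, and readCommit branches on the same leading byte. A self-contained sketch of just that framing, assuming a caller-supplied DataOutput (the class name is invented):

```java
import java.io.IOException;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.util.StringHelper;

// A 4.x segment with no id is recorded as a single 0 byte; a 5.0 segment as a
// 1 byte followed by the 16-byte id.
final class SegmentIdFraming {
  static void writeSegmentID(DataOutput out, byte[] segmentID) throws IOException {
    if (segmentID == null) {
      out.writeByte((byte) 0);  // pre-5.0 segment: no id available
    } else {
      if (segmentID.length != StringHelper.ID_LENGTH) {
        throw new IllegalStateException("invalid id length: " + segmentID.length);
      }
      out.writeByte((byte) 1);  // id follows
      out.writeBytes(segmentID, segmentID.length);
    }
  }
}
```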
@ -547,7 +549,7 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
* time, etc., it could have been deleted due to a writer
* commit finishing.
*/
public abstract static class FindSegmentsFile {
public abstract static class FindSegmentsFile<T> {

final Directory directory;

@ -558,12 +560,12 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo

/** Locate the most recent {@code segments} file and
* run {@link #doBody} on it. */
public Object run() throws IOException {
public T run() throws IOException {
return run(null);
}

/** Run {@link #doBody} on the provided commit. */
public Object run(IndexCommit commit) throws IOException {
public T run(IndexCommit commit) throws IOException {
if (commit != null) {
if (directory != commit.getDirectory())
throw new IOException("the specified commit does not match the specified Directory");
@ -607,11 +609,11 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen);

try {
Object v = doBody(segmentFileName);
T t = doBody(segmentFileName);
if (infoStream != null) {
message("success on " + segmentFileName);
}
return v;
return t;
} catch (IOException err) {
// Save the original root cause:
if (exc == null) {
@ -634,7 +636,7 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentCommitInfo
* during the processing that could have been caused by
* a writer committing.
*/
protected abstract Object doBody(String segmentFileName) throws IOException;
protected abstract T doBody(String segmentFileName) throws IOException;
}

// Carry over generation numbers from another SegmentInfos

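With the class now generic, doBody returns a typed value and callers no longer cast the Object that run() used to hand back. A hypothetical subclass, assuming the public constructor shown above; fetching a commit's user data this way inherits the retry logic for free:

```java
import java.io.IOException;
import java.util.Map;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;

final class LatestUserData {
  static Map<String,String> read(final Directory dir) throws IOException {
    return new SegmentInfos.FindSegmentsFile<Map<String,String>>(dir) {
      @Override
      protected Map<String,String> doBody(String segmentFileName) throws IOException {
        // Retry-safe: if a concurrent commit deletes segmentFileName,
        // run() falls back to the next generation and calls us again.
        return SegmentInfos.readCommit(dir, segmentFileName).getUserData();
      }
    }.run();
  }
}
```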
@ -47,11 +47,10 @@ final class StandardDirectoryReader extends DirectoryReader {

/** called from DirectoryReader.open(...) methods */
static DirectoryReader open(final Directory directory, final IndexCommit commit) throws IOException {
return (DirectoryReader) new SegmentInfos.FindSegmentsFile(directory) {
return new SegmentInfos.FindSegmentsFile<DirectoryReader>(directory) {
@Override
protected Object doBody(String segmentFileName) throws IOException {
SegmentInfos sis = new SegmentInfos();
sis.read(directory, segmentFileName);
protected DirectoryReader doBody(String segmentFileName) throws IOException {
SegmentInfos sis = SegmentInfos.readCommit(directory, segmentFileName);
final SegmentReader[] readers = new SegmentReader[sis.size()];
for (int i = sis.size()-1; i >= 0; i--) {
boolean success = false;
@ -309,11 +308,10 @@ final class StandardDirectoryReader extends DirectoryReader {
}

private DirectoryReader doOpenFromCommit(IndexCommit commit) throws IOException {
return (DirectoryReader) new SegmentInfos.FindSegmentsFile(directory) {
return new SegmentInfos.FindSegmentsFile<DirectoryReader>(directory) {
@Override
protected Object doBody(String segmentFileName) throws IOException {
final SegmentInfos infos = new SegmentInfos();
infos.read(directory, segmentFileName);
protected DirectoryReader doBody(String segmentFileName) throws IOException {
final SegmentInfos infos = SegmentInfos.readCommit(directory, segmentFileName);
return doOpenIfChanged(infos);
}
}.run(commit);
@ -338,8 +336,7 @@ final class StandardDirectoryReader extends DirectoryReader {
// IndexWriter.prepareCommit has been called (but not
// yet commit), then the reader will still see itself as
// current:
SegmentInfos sis = new SegmentInfos();
sis.read(directory);
SegmentInfos sis = SegmentInfos.readLatestCommit(directory);

// we loaded SegmentInfos from the directory
return sis.getVersion() == segmentInfos.getVersion();

@ -1116,8 +1116,7 @@ public class TestAddIndexes extends LuceneTestCase {
w3.close();
// we should now see segments_X,
// _Y.cfs,_Y.cfe, _Z.si
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals("Only one compound segment should exist", 1, sis.size());
assertTrue(sis.info(0).info.getUseCompoundFile());
dir.close();

@ -26,7 +26,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

@ -36,10 +35,6 @@ import org.apache.lucene.util.TestUtil;
public class TestAllFilesHaveChecksumFooter extends LuceneTestCase {
public void test() throws Exception {
Directory dir = newDirectory();
if (dir instanceof MockDirectoryWrapper) {
// Else we might remove .cfe but not the corresponding .cfs, causing false exc when trying to verify headers:
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(TestUtil.getDefaultCodec());
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
@ -68,8 +63,7 @@ public class TestAllFilesHaveChecksumFooter extends LuceneTestCase {
}

private void checkFooters(Directory dir) throws IOException {
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
checkFooter(dir, sis.getSegmentsFileName());

for (SegmentCommitInfo si : sis) {

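For context, a footer check like the one this test performs reduces to a single CodecUtil call. A hedged sketch of what checkFooter plausibly does per file (the class name is invented; the test's actual helper is not shown in this diff):

```java
import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

final class FooterCheck {
  static void checkFooter(Directory dir, String fileName) throws IOException {
    try (IndexInput in = dir.openInput(fileName, IOContext.READONCE)) {
      // Re-reads the whole file and validates the trailing checksum;
      // throws CorruptIndexException on mismatch:
      CodecUtil.checksumEntireFile(in);
    }
  }
}
```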
@ -30,9 +30,7 @@ import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;

/**
@ -42,11 +40,6 @@ public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
public void test() throws Exception {
Directory dir = newDirectory();

if (dir instanceof MockDirectoryWrapper) {
// Else we might remove .cfe but not the corresponding .cfs, causing false exc when trying to verify headers:
((MockDirectoryWrapper) dir).setEnableVirusScanner(false);
}

IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(TestUtil.getDefaultCodec());
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
@ -71,10 +64,12 @@ public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
if (random().nextInt(7) == 0) {
riw.commit();
}
// TODO: we should make a new format with a clean header...
// if (random().nextInt(20) == 0) {
// riw.deleteDocuments(new Term("id", Integer.toString(i)));
// }
if (random().nextInt(20) == 0) {
riw.deleteDocuments(new Term("id", Integer.toString(i)));
}
if (random().nextInt(15) == 0) {
riw.updateNumericDocValue(new Term("id"), "dv", Long.valueOf(i));
}
}
riw.close();
checkHeaders(dir, new HashMap<String,String>());
@ -82,9 +77,8 @@ public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
}

private void checkHeaders(Directory dir, Map<String,String> namesToExtensions) throws IOException {
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
checkHeader(dir, sis.getSegmentsFileName(), namesToExtensions, null);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
checkHeader(dir, sis.getSegmentsFileName(), namesToExtensions, sis.getId());

for (SegmentCommitInfo si : sis) {
assertNotNull(si.info.getId());
@ -118,13 +112,8 @@ public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
}
// read version
in.readInt();
// read segment id (except for segments_N)
if (id != null) {
byte actualID[] = new byte[StringHelper.ID_LENGTH];
in.readBytes(actualID, 0, actualID.length);
assertArrayEquals("expected " + StringHelper.idToString(id) + ", got " + StringHelper.idToString(actualID), id, actualID);
}

// read object id
CodecUtil.checkIndexHeaderID(in, id);
}
}
}

@ -199,12 +199,12 @@ public class TestCodecUtil extends LuceneTestCase {
public void testSegmentHeaderLength() throws Exception {
RAMFile file = new RAMFile();
IndexOutput output = new RAMOutputStream(file, true);
CodecUtil.writeSegmentHeader(output, "FooBar", 5, StringHelper.randomId(), "xyz");
CodecUtil.writeIndexHeader(output, "FooBar", 5, StringHelper.randomId(), "xyz");
output.writeString("this is the data");
output.close();

IndexInput input = new RAMInputStream("file", file);
input.seek(CodecUtil.segmentHeaderLength("FooBar", "xyz"));
input.seek(CodecUtil.indexHeaderLength("FooBar", "xyz"));
assertEquals("this is the data", input.readString());
input.close();
}
@ -217,7 +217,7 @@ public class TestCodecUtil extends LuceneTestCase {
RAMFile file = new RAMFile();
IndexOutput output = new RAMOutputStream(file, true);
try {
CodecUtil.writeSegmentHeader(output, "foobar", 5, StringHelper.randomId(), tooLong.toString());
CodecUtil.writeIndexHeader(output, "foobar", 5, StringHelper.randomId(), tooLong.toString());
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {
// expected
@ -232,13 +232,13 @@ public class TestCodecUtil extends LuceneTestCase {
RAMFile file = new RAMFile();
IndexOutput output = new RAMOutputStream(file, true);
byte[] id = StringHelper.randomId();
CodecUtil.writeSegmentHeader(output, "foobar", 5, id, justLongEnough.toString());
CodecUtil.writeIndexHeader(output, "foobar", 5, id, justLongEnough.toString());
output.close();

IndexInput input = new RAMInputStream("file", file);
CodecUtil.checkSegmentHeader(input, "foobar", 5, 5, id, justLongEnough.toString());
CodecUtil.checkIndexHeader(input, "foobar", 5, 5, id, justLongEnough.toString());
assertEquals(input.getFilePointer(), input.length());
assertEquals(input.getFilePointer(), CodecUtil.segmentHeaderLength("foobar", justLongEnough.toString()));
assertEquals(input.getFilePointer(), CodecUtil.indexHeaderLength("foobar", justLongEnough.toString()));
input.close();
}

@ -246,7 +246,7 @@ public class TestCodecUtil extends LuceneTestCase {
RAMFile file = new RAMFile();
IndexOutput output = new RAMOutputStream(file, true);
try {
CodecUtil.writeSegmentHeader(output, "foobar", 5, StringHelper.randomId(), "\u1234");
CodecUtil.writeIndexHeader(output, "foobar", 5, StringHelper.randomId(), "\u1234");
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {
// expected

@ -64,8 +64,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {

writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(2, sis.size());

FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
@ -82,8 +81,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

sis = new SegmentInfos();
sis.read(dir);
sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());

FieldInfos fis3 = IndexWriter.readFieldInfos(sis.info(0));
@ -130,8 +128,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.addIndexes(dir2);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir1);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir1);
assertEquals(2, sis.size());

FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
@ -161,8 +158,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new TextField("f2", "d1 second field", Field.Store.YES));
writer.addDocument(d);
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());
FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
assertEquals("f1", fis1.fieldInfo(0).name);
@ -178,8 +174,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new StoredField("f3", new byte[] { 1, 2, 3 }));
writer.addDocument(d);
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(2, sis.size());
FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
FieldInfos fis2 = IndexWriter.readFieldInfos(sis.info(1));
@ -199,8 +194,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new StoredField("f3", new byte[] { 1, 2, 3, 4, 5 }));
writer.addDocument(d);
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(3, sis.size());
FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
FieldInfos fis2 = IndexWriter.readFieldInfos(sis.info(1));
@ -231,8 +225,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());
FieldInfos fis1 = IndexWriter.readFieldInfos(sis.info(0));
assertEquals("f1", fis1.fieldInfo(0).name);
@ -269,8 +262,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
for (SegmentCommitInfo si : sis) {
FieldInfos fis = IndexWriter.readFieldInfos(si);

@ -285,8 +285,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// if we are on a filesystem that seems to have only
// 1 second resolution, allow +1 second in commit
// age tolerance:
SegmentInfos sis = new SegmentInfos();
sis.read(dir, fileName);
SegmentInfos sis = SegmentInfos.readCommit(dir, fileName);
long modTime = Long.parseLong(sis.getUserData().get("commitTime"));
oneSecondResolution &= (modTime % 1000) == 0;
final long leeway = (long) ((SECONDS + (oneSecondResolution ? 1.0:0.0))*1000);

@ -664,8 +664,7 @@ public void testFilesOpenClose() throws IOException {
addDocumentWithFields(writer);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(d);
SegmentInfos sis = SegmentInfos.readLatestCommit(d);
DirectoryReader r = DirectoryReader.open(d);
IndexCommit c = r.getIndexCommit();

@ -85,8 +85,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
writer.close();

// read in index to try to not depend on codec-specific filenames so much
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
SegmentInfo si0 = sis.info(0).info;
SegmentInfo si1 = sis.info(1).info;
SegmentInfo si3 = sis.info(3).info;
@ -123,10 +122,6 @@ public class TestIndexFileDeleter extends LuceneTestCase {

// Create a bogus fnm file when the CFS already exists:
copyFile(dir, cfsFiles0[0], "_0.fnm");

// Create some old segments file:
copyFile(dir, "segments_2", "segments");
copyFile(dir, "segments_2", "segments_1");

// Create a bogus cfs file shadowing a non-cfs segment:

@ -143,8 +138,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {

String[] filesPre = dir.listAll();

// Open & close a writer: it should delete the above 4
// files and nothing more:
// Open & close a writer: it should delete the above files and nothing more:
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.close();
@ -265,8 +259,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// empty commit
new IndexWriter(dir, new IndexWriterConfig(null)).close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.getGeneration());

// no inflation
@ -283,8 +276,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// empty commit
new IndexWriter(dir, new IndexWriterConfig(null)).close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.getGeneration());

// add trash commit
@ -308,8 +300,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// empty commit
new IndexWriter(dir, new IndexWriterConfig(null)).close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(0, sis.counter);

// no inflation
@ -333,8 +324,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
iw.addDocument(new Document());
iw.commit();
iw.close();
sis = new SegmentInfos();
sis.read(dir);
sis = SegmentInfos.readLatestCommit(dir);
assertEquals("_4", sis.info(0).info.name);
assertEquals(5, sis.counter);

@ -351,8 +341,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
iw.close();

// no deletes: start at 1
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.info(0).getNextDelGen());

// no inflation
@ -376,8 +365,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// empty commit
new IndexWriter(dir, new IndexWriterConfig(null)).close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.getGeneration());

// add trash file
@ -400,8 +388,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
iw.close();

// no deletes: start at 1
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.info(0).getNextDelGen());

// add trash file

@ -575,8 +575,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.flush(false, true);
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
// Since we flushed w/o allowing merging we should now
// have 10 segments
assertEquals(10, sis.size());
@ -2767,8 +2766,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(new Document());
w.close();

SegmentInfos sis = new SegmentInfos();
sis.read(d);
SegmentInfos sis = SegmentInfos.readLatestCommit(d);
byte[] id1 = sis.getId();
assertNotNull(id1);
assertEquals(StringHelper.ID_LENGTH, id1.length);

@ -1236,8 +1236,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);

boolean corrupted = false;
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
for (SegmentCommitInfo si : sis) {
assertTrue(si.info.getUseCompoundFile());
String cfsFiles[] = si.info.getCodec().compoundFormat().files(si.info);
@ -1314,8 +1313,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
w.close();
IndexReader reader = DirectoryReader.open(dir);
assertTrue(reader.numDocs() > 0);
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
for(LeafReaderContext context : reader.leaves()) {
assertFalse(context.reader().getFieldInfos().hasVectors());
}
@ -1682,7 +1680,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (doFail && name.startsWith("segments_")) {
StackTraceElement[] trace = new Exception().getStackTrace();
for (int i = 0; i < trace.length; i++) {
if ("read".equals(trace[i].getMethodName())) {
if ("readCommit".equals(trace[i].getMethodName()) || "readLatestCommit".equals(trace[i].getMethodName())) {
throw new UnsupportedOperationException("expected UOE");
}
}

@ -48,8 +48,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer.addDocument(doc);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
final int segCount = sis.size();

ldmp = new LogDocMergePolicy();
@ -59,8 +58,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer.forceMerge(3);
writer.close();

sis = new SegmentInfos();
sis.read(dir);
sis = SegmentInfos.readLatestCommit(dir);
final int optSegCount = sis.size();

if (segCount < 3)
@ -93,16 +91,14 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer.waitForMerges();
writer.commit();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);

final int segCount = sis.size();
writer.forceMerge(7);
writer.commit();
writer.waitForMerges();

sis = new SegmentInfos();
sis.read(dir);
sis = SegmentInfos.readLatestCommit(dir);
final int optSegCount = sis.size();

if (segCount < 7)
@ -226,8 +222,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
assertTrue(reader.leaves().size() > 1);
reader.close();

SegmentInfos infos = new SegmentInfos();
infos.read(dir);
SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
assertEquals(2, infos.size());
}
}

@ -34,8 +34,10 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.TestUtil;

public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
@ -331,7 +333,8 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
String segName = IndexFileNames.parseSegmentName(fileName);
if (segSeen.contains(segName) == false) {
segSeen.add(segName);
SegmentInfo si = TestUtil.getDefaultCodec().segmentInfoFormat().read(dir, segName, IOContext.DEFAULT);
byte id[] = readSegmentInfoID(dir, fileName);
SegmentInfo si = TestUtil.getDefaultCodec().segmentInfoFormat().read(dir, segName, id, IOContext.DEFAULT);
si.setCodec(codec);
SegmentCommitInfo sci = new SegmentCommitInfo(si, 0, -1, -1, -1);
SegmentReader sr = new SegmentReader(sci, IOContext.DEFAULT);
@ -349,4 +352,17 @@ public class TestIndexWriterThreadsToSegments extends LuceneTestCase {
w.close();
dir.close();
}

// TODO: remove this hack and fix this test to be better?
// the whole thing relies on default codec too...
byte[] readSegmentInfoID(Directory dir, String file) throws IOException {
try (IndexInput in = dir.openInput(file, IOContext.DEFAULT)) {
in.readInt(); // magic
in.readString(); // codec name
in.readInt(); // version
byte id[] = new byte[StringHelper.ID_LENGTH];
in.readBytes(id, 0, id.length);
return id;
}
}
}

@ -143,8 +143,7 @@ public class TestRollingUpdates extends LuceneTestCase {
docs.close();

// LUCENE-4455:
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
long totalBytes = 0;
for(SegmentCommitInfo sipc : infos) {
totalBytes += sipc.sizeInBytes();

@ -66,8 +66,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
double min = sis.info(0).sizeInBytes();

conf = newWriterConfig();
@ -80,8 +79,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();

// Should only be 3 segments in the index, because one of them exceeds the size limit
sis = new SegmentInfos();
sis.read(dir);
sis = SegmentInfos.readLatestCommit(dir);
assertEquals(3, sis.size());
}

@ -113,8 +111,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();

// Should only be 3 segments in the index, because one of them exceeds the size limit
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(3, sis.size());
}

@ -140,8 +137,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(2, sis.size());
}

@ -167,8 +163,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(2, sis.size());
}

@ -194,8 +189,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());
}

@ -220,8 +214,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(3, sis.size());
}

@ -247,8 +240,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.forceMerge(1);
writer.close();

SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(4, sis.size());
}

@ -280,8 +272,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {

// Should only be 4 segments in the index, because of the merge factor and
// max merge docs settings.
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(4, sis.size());
}

@ -309,8 +300,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();

// Verify that the last segment does not have deletions.
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(3, sis.size());
assertFalse(sis.info(2).hasDeletions());
}
@ -335,8 +325,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();

// Verify that the last segment does not have deletions.
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());
}

@ -363,8 +352,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();

// Verify that the last segment does not have deletions.
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
assertEquals(1, sis.size());
assertTrue(sis.info(0).hasDeletions());
}

@ -131,8 +131,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {

/** Reads the commit data from a Directory. */
private static Map<String, String> readCommitData(Directory dir) throws IOException {
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
return infos.getUserData();
}

@ -324,8 +324,7 @@ public class TestDirectoryTaxonomyWriter extends FacetTestCase {
}

private long getEpoch(Directory taxoDir) throws IOException {
SegmentInfos infos = new SegmentInfos();
infos.read(taxoDir);
SegmentInfos infos = SegmentInfos.readLatestCommit(taxoDir);
return Long.parseLong(infos.getUserData().get(DirectoryTaxonomyWriter.INDEX_EPOCH));
}

@ -90,8 +90,7 @@ public class IndexSplitter {
public IndexSplitter(Path dir) throws IOException {
this.dir = dir;
fsDir = FSDirectory.open(dir);
infos = new SegmentInfos();
infos.read(fsDir);
infos = SegmentInfos.readLatestCommit(fsDir);
}

public void listSegments() throws IOException {
|
@ -78,8 +78,7 @@ public class TestIndexSplitter extends LuceneTestCase {
Path destDir2 = createTempDir(LuceneTestCase.getTestClass().getSimpleName());
IndexSplitter.main(new String[] {dir.toAbsolutePath().toString(), destDir2.toAbsolutePath().toString(), splitSegName});
Directory fsDirDest2 = newFSDirectory(destDir2);
SegmentInfos sis = new SegmentInfos();
sis.read(fsDirDest2);
SegmentInfos sis = SegmentInfos.readLatestCommit(fsDirDest2);
assertEquals(1, sis.size());
r = DirectoryReader.open(fsDirDest2);
assertEquals(50, r.maxDoc());
|
@ -37,7 +37,7 @@ final class IDVersionPostingsReader extends PostingsReaderBase {
@Override
public void init(IndexInput termsIn, SegmentReadState state) throws IOException {
// Make sure we are talking to the matching postings writer
CodecUtil.checkSegmentHeader(termsIn,
CodecUtil.checkIndexHeader(termsIn,
IDVersionPostingsWriter.TERMS_CODEC,
IDVersionPostingsWriter.VERSION_START,
IDVersionPostingsWriter.VERSION_CURRENT,
|
@ -57,7 +57,7 @@ final class IDVersionPostingsWriter extends PushPostingsWriterBase {

@Override
public void init(IndexOutput termsOut, SegmentWriteState state) throws IOException {
CodecUtil.writeSegmentHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.writeIndexHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
}

@Override
|
@ -72,7 +72,7 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
IndexInput indexIn = null;

try {
int termsVersion = CodecUtil.checkSegmentHeader(in, VersionBlockTreeTermsWriter.TERMS_CODEC_NAME,
int termsVersion = CodecUtil.checkIndexHeader(in, VersionBlockTreeTermsWriter.TERMS_CODEC_NAME,
VersionBlockTreeTermsWriter.VERSION_START,
VersionBlockTreeTermsWriter.VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
@ -81,7 +81,7 @@ public final class VersionBlockTreeTermsReader extends FieldsProducer {
state.segmentSuffix,
VersionBlockTreeTermsWriter.TERMS_INDEX_EXTENSION);
indexIn = state.directory.openInput(indexFile, state.context);
int indexVersion = CodecUtil.checkSegmentHeader(indexIn, VersionBlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
int indexVersion = CodecUtil.checkIndexHeader(indexIn, VersionBlockTreeTermsWriter.TERMS_INDEX_CODEC_NAME,
VersionBlockTreeTermsWriter.VERSION_START,
VersionBlockTreeTermsWriter.VERSION_CURRENT,
state.segmentInfo.getId(), state.segmentSuffix);
|
@ -199,13 +199,13 @@ public final class VersionBlockTreeTermsWriter extends FieldsConsumer {
fieldInfos = state.fieldInfos;
this.minItemsInBlock = minItemsInBlock;
this.maxItemsInBlock = maxItemsInBlock;
CodecUtil.writeSegmentHeader(out, TERMS_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.writeIndexHeader(out, TERMS_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);

//DEBUG = state.segmentName.equals("_4a");

final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
indexOut = state.directory.createOutput(termsIndexFileName, state.context);
CodecUtil.writeSegmentHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.writeIndexHeader(indexOut, TERMS_INDEX_CODEC_NAME, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);

this.postingsWriter = postingsWriter;
// segment = state.segmentInfo.name;
|
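The four files above carry the commit's central rename: CodecUtil.writeSegmentHeader/checkSegmentHeader become writeIndexHeader/checkIndexHeader with the argument list unchanged (codec name, version bounds, segment id, segment suffix). A hedged round-trip sketch built only from the call shapes shown in these hunks; the codec name and version constants are placeholders:

import java.io.IOException;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;

final class IndexHeaderSketch {
  static final String CODEC = "SketchCodec"; // placeholder codec name
  static final int VERSION_START = 0;        // placeholder version bounds
  static final int VERSION_CURRENT = 0;

  // Write side: stamps the file with magic, codec name, version, segment id
  // and segment suffix.
  static void writeHeader(DataOutput out, byte[] segmentID, String suffix) throws IOException {
    CodecUtil.writeIndexHeader(out, CODEC, VERSION_CURRENT, segmentID, suffix);
  }

  // Read side: verifies every stamped field and returns the file's version;
  // a header written for a different segment id now fails fast here.
  static int checkHeader(DataInput in, byte[] segmentID, String suffix) throws IOException {
    return CodecUtil.checkIndexHeader(in, CODEC, VERSION_START, VERSION_CURRENT, segmentID, suffix);
  }
}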
@ -35,8 +35,8 @@ class CrankySegmentInfoFormat extends SegmentInfoFormat {
}

@Override
public SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException {
return delegate.read(directory, segmentName, context);
public SegmentInfo read(Directory directory, String segmentName, byte[] segmentID, IOContext context) throws IOException {
return delegate.read(directory, segmentName, segmentID, context);
}

@Override
|
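CrankySegmentInfoFormat tracks the interface change behind the "detect mismatched .si" part of this commit: SegmentInfoFormat.read now receives the segment id the caller expects, so an implementation can compare it against the id stored in the .si file instead of silently loading data for the wrong segment. A usage fragment under assumed context (dir, codec, and expectedID are stand-ins for values the caller already has):

// expectedID would come from segments_N; a stale or copied .si file
// for a different segment can now be rejected at read time.
SegmentInfo info = codec.segmentInfoFormat().read(dir, "_123", expectedID, IOContext.DEFAULT);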
@ -197,8 +197,7 @@ public abstract class BaseCompoundFormatTestCase extends BaseIndexFileFormatTest
}
}
riw.close();
SegmentInfos infos = new SegmentInfos();
infos.read(dir);
SegmentInfos infos = SegmentInfos.readLatestCommit(dir);
for (SegmentCommitInfo si : infos) {
if (si.info.getUseCompoundFile()) {
try (Directory cfsDir = si.info.getCodec().compoundFormat().getCompoundReader(dir, si.info, newIOContext(random()))) {
|
@ -47,11 +47,12 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
public void testFiles() throws Exception {
Directory dir = newDirectory();
Codec codec = getCodec();
byte id[] = StringHelper.randomId();
SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
Collections.<String,String>emptyMap(), StringHelper.randomId());
Collections.<String,String>emptyMap(), id);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
assertEquals(info.files(), info2.files());
dir.close();
}
@ -60,8 +61,9 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
public void testAddsSelfToFiles() throws Exception {
Directory dir = newDirectory();
Codec codec = getCodec();
byte id[] = StringHelper.randomId();
SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
Collections.<String,String>emptyMap(), StringHelper.randomId());
Collections.<String,String>emptyMap(), id);
Set<String> originalFiles = Collections.singleton("_123.a");
info.setFiles(originalFiles);
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@ -70,7 +72,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
assertTrue(modifiedFiles.containsAll(originalFiles));
assertTrue("did you forget to add yourself to files()", modifiedFiles.size() > originalFiles.size());

SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
assertEquals(info.files(), info2.files());
dir.close();
}
@ -79,14 +81,15 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
public void testDiagnostics() throws Exception {
Directory dir = newDirectory();
Codec codec = getCodec();
byte id[] = StringHelper.randomId();
Map<String,String> diagnostics = new HashMap<>();
diagnostics.put("key1", "value1");
diagnostics.put("key2", "value2");
SegmentInfo info = new SegmentInfo(dir, getVersions()[0], "_123", 1, false, codec,
diagnostics, StringHelper.randomId());
diagnostics, id);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
assertEquals(diagnostics, info2.getDiagnostics());
dir.close();
}
@ -100,7 +103,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
Collections.<String,String>emptyMap(), id);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
assertIDEquals(id, info2.getId());
dir.close();
}
@ -110,11 +113,12 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
Codec codec = getCodec();
for (Version v : getVersions()) {
Directory dir = newDirectory();
byte id[] = StringHelper.randomId();
SegmentInfo info = new SegmentInfo(dir, v, "_123", 1, false, codec,
Collections.<String,String>emptyMap(), StringHelper.randomId());
Collections.<String,String>emptyMap(), id);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);
assertEquals(info2.getVersion(), v);
dir.close();
}
@ -152,7 +156,7 @@ public abstract class BaseSegmentInfoFormatTestCase extends BaseIndexFileFormatT
SegmentInfo info = new SegmentInfo(dir, version, name, docCount, isCompoundFile, codec, diagnostics, id);
info.setFiles(files);
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, name, id, IOContext.DEFAULT);
assertEquals(info, info2);

dir.close();
|
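The test changes above follow one mechanical pattern: StringHelper.randomId() is hoisted into a local id so the same bytes flow into the SegmentInfo constructor and back into read, making every round-trip also exercise the new id check. Condensed from the hunks above (variable names as in the tests):

byte id[] = StringHelper.randomId(); // one id for the whole round-trip
SegmentInfo info = new SegmentInfo(dir, version, "_123", 1, false, codec,
    Collections.<String,String>emptyMap(), id);
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentInfo info2 = codec.segmentInfoFormat().read(dir, "_123", id, IOContext.DEFAULT);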
@ -800,11 +800,12 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
if (LuceneTestCase.VERBOSE) {
System.out.println("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete.");
}
SegmentInfos sis = new SegmentInfos();
SegmentInfos sis;
try {
sis.read(in, file);
sis = SegmentInfos.readCommit(in, file);
} catch (IOException ioe) {
// OK: likely some of the .si files were deleted
sis = new SegmentInfos();
}

try {
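Alongside readLatestCommit, this hunk shows the second static factory, SegmentInfos.readCommit(Directory, String), for parsing one specific segments_N file; MockDirectoryWrapper wraps it in a try/catch because the commit it names may already be partially deleted. A two-line contrast, with the file name as a hypothetical example:

SegmentInfos latest = SegmentInfos.readLatestCommit(dir);      // newest commit in dir
SegmentInfos one = SegmentInfos.readCommit(dir, "segments_4"); // a specific commit by name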