LUCENE-9456: Move metadata about stored fields to the meta file. (#1752)

Stored fields already have a metadata file, but it currently only records
metadata about the index file, not about the data file. This commit moves all
stored-fields metadata to the meta file.
Adrien Grand 2020-08-26 10:27:03 +02:00 committed by GitHub
parent f4c4fbcafc
commit e6fa5a5fc4
7 changed files with 146 additions and 83 deletions
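For context before the diff: the stored-fields index already had a small meta file, but its name was assembled from an extension prefix ("fd") plus a one-letter suffix, and it only described the index. This commit gives all three files explicit extensions and routes every piece of stored-fields metadata through the meta file; term vectors get the same treatment. A minimal sketch (not part of the commit) of the resulting per-segment file names, using Lucene's real IndexFileNames helper; the segment name "_0" and empty suffix are example values:

import org.apache.lucene.index.IndexFileNames;

public class NewFileLayout {
  public static void main(String[] args) {
    String segment = "_0", suffix = "";
    // Stored fields: data, index, and the now explicitly named meta file.
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "fdt")); // _0.fdt
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "fdx")); // _0.fdx
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "fdm")); // _0.fdm
    // Term vectors follow the same pattern.
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "tvd")); // _0.tvd
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "tvx")); // _0.tvx
    System.out.println(IndexFileNames.segmentFileName(segment, suffix, "tvm")); // _0.tvm
  }
}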

lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsReader.java

@@ -24,7 +24,9 @@ import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.HOUR;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.HOUR_ENCODING;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.INDEX_CODEC_NAME;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.INDEX_EXTENSION_PREFIX;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.INDEX_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.META_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.META_VERSION_START;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.NUMERIC_DOUBLE;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.NUMERIC_FLOAT;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.NUMERIC_INT;
@@ -35,6 +37,7 @@ import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.TYPE_BITS;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.TYPE_MASK;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_CURRENT;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_META;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_OFFHEAP_INDEX;
import static org.apache.lucene.codecs.compressing.CompressingStoredFieldsWriter.VERSION_START;
@@ -120,14 +123,26 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
numDocs = si.maxDoc();
final String fieldsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION);
ChecksumIndexInput metaIn = null;
try {
// Open the data file and read metadata
// Open the data file
fieldsStream = d.openInput(fieldsStreamFN, context);
version = CodecUtil.checkIndexHeader(fieldsStream, formatName, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(formatName, segmentSuffix) == fieldsStream.getFilePointer();
chunkSize = fieldsStream.readVInt();
packedIntsVersion = fieldsStream.readVInt();
if (version >= VERSION_OFFHEAP_INDEX) {
final String metaStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, META_EXTENSION);
metaIn = d.openChecksumInput(metaStreamFN, IOContext.READONCE);
CodecUtil.checkIndexHeader(metaIn, INDEX_CODEC_NAME + "Meta", META_VERSION_START, version, si.getId(), segmentSuffix);
}
if (version >= VERSION_META) {
chunkSize = metaIn.readVInt();
packedIntsVersion = metaIn.readVInt();
} else {
chunkSize = fieldsStream.readVInt();
packedIntsVersion = fieldsStream.readVInt();
}
decompressor = compressionMode.newDecompressor();
this.merging = false;
this.state = new BlockState();
@@ -163,7 +178,7 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
}
}
} else {
FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId());
FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, INDEX_EXTENSION, INDEX_CODEC_NAME, si.getId(), metaIn);
indexReader = fieldsIndexReader;
maxPointer = fieldsIndexReader.getMaxPointer();
}
@@ -171,17 +186,34 @@ public final class CompressingStoredFieldsReader extends StoredFieldsReader {
this.maxPointer = maxPointer;
this.indexReader = indexReader;
fieldsStream.seek(maxPointer);
numChunks = fieldsStream.readVLong();
numDirtyChunks = fieldsStream.readVLong();
if (version >= VERSION_META) {
numChunks = metaIn.readVLong();
numDirtyChunks = metaIn.readVLong();
} else {
fieldsStream.seek(maxPointer);
numChunks = fieldsStream.readVLong();
numDirtyChunks = fieldsStream.readVLong();
}
if (numDirtyChunks > numChunks) {
throw new CorruptIndexException("invalid chunk counts: dirty=" + numDirtyChunks + ", total=" + numChunks, fieldsStream);
}
if (metaIn != null) {
CodecUtil.checkFooter(metaIn, null);
metaIn.close();
}
success = true;
} catch (Throwable t) {
if (metaIn != null) {
CodecUtil.checkFooter(metaIn, t);
throw new AssertionError("unreachable");
} else {
throw t;
}
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(this);
IOUtils.closeWhileHandlingException(this, metaIn);
}
}
}
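The catch block above leans on a CodecUtil idiom worth spelling out: checkFooter(in, priorException) never returns normally when priorException is non-null; it inspects the footer, attaches any corruption it detects as extra information, and rethrows. The AssertionError after it only documents that guarantee. A condensed sketch of the idiom, assuming just CodecUtil's public contract (readMetadata is a hypothetical stand-in for the reads above):

ChecksumIndexInput metaIn = dir.openChecksumInput(metaName, IOContext.READONCE);
try {
  readMetadata(metaIn);                 // hypothetical helper: the VInt/VLong reads
  CodecUtil.checkFooter(metaIn, null);  // no prior error: verify checksum, throw on mismatch
  metaIn.close();
} catch (Throwable t) {
  CodecUtil.checkFooter(metaIn, t);     // prior error: always rethrows, possibly enriched
  throw new AssertionError("unreachable");
}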

lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingStoredFieldsWriter.java

@@ -57,7 +57,9 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
/** Extension of stored fields file */
public static final String FIELDS_EXTENSION = "fdt";
/** Extension of stored fields index */
public static final String INDEX_EXTENSION_PREFIX = "fd";
public static final String INDEX_EXTENSION = "fdx";
/** Extension of stored fields meta */
public static final String META_EXTENSION = "fdm";
/** Codec name for the index. */
public static final String INDEX_CODEC_NAME = "Lucene85FieldsIndex";
@@ -73,11 +75,14 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
static final int VERSION_START = 1;
static final int VERSION_OFFHEAP_INDEX = 2;
static final int VERSION_CURRENT = VERSION_OFFHEAP_INDEX;
/** Version where all metadata were moved to the meta file. */
static final int VERSION_META = 3;
static final int VERSION_CURRENT = VERSION_META;
static final int META_VERSION_START = 0;
private final String segment;
private FieldsIndexWriter indexWriter;
private IndexOutput fieldsStream;
private IndexOutput metaStream, fieldsStream;
private Compressor compressor;
private final CompressionMode compressionMode;
@@ -110,19 +115,23 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
boolean success = false;
try {
metaStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, META_EXTENSION), context);
CodecUtil.writeIndexHeader(metaStream, INDEX_CODEC_NAME + "Meta", VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(INDEX_CODEC_NAME + "Meta", segmentSuffix) == metaStream.getFilePointer();
fieldsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
CodecUtil.writeIndexHeader(fieldsStream, formatName, VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(formatName, segmentSuffix) == fieldsStream.getFilePointer();
indexWriter = new FieldsIndexWriter(directory, segment, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId(), blockShift, context);
indexWriter = new FieldsIndexWriter(directory, segment, segmentSuffix, INDEX_EXTENSION, INDEX_CODEC_NAME, si.getId(), blockShift, context);
fieldsStream.writeVInt(chunkSize);
fieldsStream.writeVInt(PackedInts.VERSION_CURRENT);
metaStream.writeVInt(chunkSize);
metaStream.writeVInt(PackedInts.VERSION_CURRENT);
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(fieldsStream, indexWriter);
IOUtils.closeWhileHandlingException(metaStream, fieldsStream, indexWriter);
}
}
}
@@ -130,8 +139,9 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
@Override
public void close() throws IOException {
try {
IOUtils.close(fieldsStream, indexWriter, compressor);
IOUtils.close(metaStream, fieldsStream, indexWriter, compressor);
} finally {
metaStream = null;
fieldsStream = null;
indexWriter = null;
compressor = null;
@@ -466,9 +476,10 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
if (docBase != numDocs) {
throw new RuntimeException("Wrote " + docBase + " docs, finish called with numDocs=" + numDocs);
}
indexWriter.finish(numDocs, fieldsStream.getFilePointer());
fieldsStream.writeVLong(numChunks);
fieldsStream.writeVLong(numDirtyChunks);
indexWriter.finish(numDocs, fieldsStream.getFilePointer(), metaStream);
metaStream.writeVLong(numChunks);
metaStream.writeVLong(numDirtyChunks);
CodecUtil.writeFooter(metaStream);
CodecUtil.writeFooter(fieldsStream);
assert bufferedDocs.size() == 0;
}
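Taken together with the reader changes above, finish() pins down the layout of the new .fdm file. The chunk counts sit at the tail because they are only known once all chunks are flushed, which is also why the old format had to park them in the data file at maxPointer. A hedged read-through of the layout in write order, reconstructed only from calls visible in this diff (dir, fdmName, segmentId and segmentSuffix are placeholder names):

try (ChecksumIndexInput meta = dir.openChecksumInput(fdmName, IOContext.READONCE)) {
  CodecUtil.checkIndexHeader(meta, "Lucene85FieldsIndexMeta",
      META_VERSION_START, VERSION_CURRENT, segmentId, segmentSuffix);
  int chunkSize = meta.readVInt();          // written in the constructor
  int packedIntsVersion = meta.readVInt();  // PackedInts.VERSION_CURRENT at write time
  // ... index section appended by FieldsIndexWriter.finish(), see later in this commit ...
  long numChunks = meta.readVLong();        // written by finish() above
  long numDirtyChunks = meta.readVLong();
  CodecUtil.checkFooter(meta, null);        // whole-file checksum verification
}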

lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsReader.java

@@ -55,14 +55,17 @@ import org.apache.lucene.util.packed.PackedInts;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_OFFHEAP_INDEX;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.FLAGS_BITS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.META_VERSION_START;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.OFFSETS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.PACKED_BLOCK_SIZE;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.PAYLOADS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.POSITIONS;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_INDEX_CODEC_NAME;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION_PREFIX;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_INDEX_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VECTORS_META_EXTENSION;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_CURRENT;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_META;
import static org.apache.lucene.codecs.compressing.CompressingTermVectorsWriter.VERSION_START;
/**
@@ -113,13 +116,34 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem
fieldInfos = fn;
numDocs = si.maxDoc();
ChecksumIndexInput metaIn = null;
try {
// Open the data file and read metadata
// Open the data file
final String vectorsStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION);
vectorsStream = d.openInput(vectorsStreamFN, context);
version = CodecUtil.checkIndexHeader(vectorsStream, formatName, VERSION_START, VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(formatName, segmentSuffix) == vectorsStream.getFilePointer();
if (version >= VERSION_OFFHEAP_INDEX) {
final String metaStreamFN = IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_META_EXTENSION);
metaIn = d.openChecksumInput(metaStreamFN, IOContext.READONCE);
CodecUtil.checkIndexHeader(metaIn, VECTORS_INDEX_CODEC_NAME + "Meta", META_VERSION_START, version, si.getId(), segmentSuffix);
}
if (version >= VERSION_META) {
packedIntsVersion = metaIn.readVInt();
chunkSize = metaIn.readVInt();
} else {
packedIntsVersion = vectorsStream.readVInt();
chunkSize = vectorsStream.readVInt();
}
// NOTE: data file is too costly to verify checksum against all the bytes on open,
// but for now we at least verify proper structure of the checksum footer: which looks
// for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
// such as file truncation.
CodecUtil.retrieveChecksum(vectorsStream);
FieldsIndex indexReader = null;
long maxPointer = -1;
@@ -145,7 +169,7 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem
}
}
} else {
FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId());
FieldsIndexReader fieldsIndexReader = new FieldsIndexReader(d, si.name, segmentSuffix, VECTORS_INDEX_EXTENSION, VECTORS_INDEX_CODEC_NAME, si.getId(), metaIn);
indexReader = fieldsIndexReader;
maxPointer = fieldsIndexReader.getMaxPointer();
}
@@ -153,30 +177,37 @@ public final class CompressingTermVectorsReader extends TermVectorsReader implem
this.indexReader = indexReader;
this.maxPointer = maxPointer;
long pos = vectorsStream.getFilePointer();
vectorsStream.seek(maxPointer);
numChunks = vectorsStream.readVLong();
numDirtyChunks = vectorsStream.readVLong();
if (version >= VERSION_META) {
numChunks = metaIn.readVLong();
numDirtyChunks = metaIn.readVLong();
} else {
vectorsStream.seek(maxPointer);
numChunks = vectorsStream.readVLong();
numDirtyChunks = vectorsStream.readVLong();
}
if (numDirtyChunks > numChunks) {
throw new CorruptIndexException("invalid chunk counts: dirty=" + numDirtyChunks + ", total=" + numChunks, vectorsStream);
}
// NOTE: data file is too costly to verify checksum against all the bytes on open,
// but for now we at least verify proper structure of the checksum footer: which looks
// for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption
// such as file truncation.
CodecUtil.retrieveChecksum(vectorsStream);
vectorsStream.seek(pos);
packedIntsVersion = vectorsStream.readVInt();
chunkSize = vectorsStream.readVInt();
decompressor = compressionMode.newDecompressor();
this.reader = new BlockPackedReaderIterator(vectorsStream, packedIntsVersion, PACKED_BLOCK_SIZE, 0);
if (metaIn != null) {
CodecUtil.checkFooter(metaIn, null);
metaIn.close();
}
success = true;
} catch (Throwable t) {
if (metaIn != null) {
CodecUtil.checkFooter(metaIn, t);
throw new AssertionError("unreachable");
} else {
throw t;
}
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(this);
IOUtils.closeWhileHandlingException(this, metaIn);
}
}
}
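The NOTE comment above marks a deliberate asymmetry that this commit leans into: the large data file is only structurally verified at open time, while the small meta file, read through a checksumming input, is verified byte for byte before it is closed. A two-line sketch of the two verification levels, assuming only CodecUtil's documented behavior:

// Structural check only: seek to the footer, validate FOOTER_MAGIC and the
// algorithm ID, and return the stored checksum without re-reading the data.
long declaredChecksum = CodecUtil.retrieveChecksum(vectorsStream);

// Full check: metaIn is a ChecksumIndexInput, so every byte read so far has
// been folded into a running checksum that checkFooter compares to the footer.
CodecUtil.checkFooter(metaIn, null);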

lucene/core/src/java/org/apache/lucene/codecs/compressing/CompressingTermVectorsWriter.java

@@ -59,12 +59,16 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
static final int MAX_DOCUMENTS_PER_CHUNK = 128;
static final String VECTORS_EXTENSION = "tvd";
static final String VECTORS_INDEX_EXTENSION_PREFIX = "tv";
static final String VECTORS_INDEX_EXTENSION = "tvx";
static final String VECTORS_META_EXTENSION = "tvm";
static final String VECTORS_INDEX_CODEC_NAME = "Lucene85TermVectorsIndex";
static final int VERSION_START = 1;
static final int VERSION_OFFHEAP_INDEX = 2;
static final int VERSION_CURRENT = VERSION_OFFHEAP_INDEX;
/** Version where all metadata were moved to the meta file. */
static final int VERSION_META = 3;
static final int VERSION_CURRENT = VERSION_META;
static final int META_VERSION_START = 0;
static final int PACKED_BLOCK_SIZE = 64;
@@ -75,7 +79,7 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
private final String segment;
private FieldsIndexWriter indexWriter;
private IndexOutput vectorsStream;
private IndexOutput metaStream, vectorsStream;
private final CompressionMode compressionMode;
private final Compressor compressor;
@@ -218,15 +222,19 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
boolean success = false;
try {
metaStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_META_EXTENSION), context);
CodecUtil.writeIndexHeader(metaStream, VECTORS_INDEX_CODEC_NAME + "Meta", VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(VECTORS_INDEX_CODEC_NAME + "Meta", segmentSuffix) == metaStream.getFilePointer();
vectorsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION),
context);
CodecUtil.writeIndexHeader(vectorsStream, formatName, VERSION_CURRENT, si.getId(), segmentSuffix);
assert CodecUtil.indexHeaderLength(formatName, segmentSuffix) == vectorsStream.getFilePointer();
indexWriter = new FieldsIndexWriter(directory, segment, segmentSuffix, VECTORS_INDEX_EXTENSION_PREFIX, VECTORS_INDEX_CODEC_NAME, si.getId(), blockShift, context);
indexWriter = new FieldsIndexWriter(directory, segment, segmentSuffix, VECTORS_INDEX_EXTENSION, VECTORS_INDEX_CODEC_NAME, si.getId(), blockShift, context);
vectorsStream.writeVInt(PackedInts.VERSION_CURRENT);
vectorsStream.writeVInt(chunkSize);
metaStream.writeVInt(PackedInts.VERSION_CURRENT);
metaStream.writeVInt(chunkSize);
writer = new BlockPackedWriter(vectorsStream, PACKED_BLOCK_SIZE);
positionsBuf = new int[1024];
@@ -237,7 +245,7 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(vectorsStream, indexWriter, indexWriter);
IOUtils.closeWhileHandlingException(metaStream, vectorsStream, indexWriter, indexWriter);
}
}
}
@@ -245,8 +253,9 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
@Override
public void close() throws IOException {
try {
IOUtils.close(vectorsStream, indexWriter);
IOUtils.close(metaStream, vectorsStream, indexWriter);
} finally {
metaStream = null;
vectorsStream = null;
indexWriter = null;
}
@@ -644,9 +653,10 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
if (numDocs != this.numDocs) {
throw new RuntimeException("Wrote " + this.numDocs + " docs, finish called with numDocs=" + numDocs);
}
indexWriter.finish(numDocs, vectorsStream.getFilePointer());
vectorsStream.writeVLong(numChunks);
vectorsStream.writeVLong(numDirtyChunks);
indexWriter.finish(numDocs, vectorsStream.getFilePointer(), metaStream);
metaStream.writeVLong(numChunks);
metaStream.writeVLong(numDirtyChunks);
CodecUtil.writeFooter(metaStream);
CodecUtil.writeFooter(vectorsStream);
}

lucene/core/src/java/org/apache/lucene/codecs/compressing/FieldsIndexReader.java

@@ -16,8 +16,6 @@
*/
package org.apache.lucene.codecs.compressing;
import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.FIELDS_INDEX_EXTENSION_SUFFIX;
import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.FIELDS_META_EXTENSION_SUFFIX;
import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.VERSION_CURRENT;
import static org.apache.lucene.codecs.compressing.FieldsIndexWriter.VERSION_START;
@@ -27,7 +25,6 @@ import java.util.Objects;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@@ -49,26 +46,18 @@ final class FieldsIndexReader extends FieldsIndex {
private final DirectMonotonicReader docs, startPointers;
private final long maxPointer;
FieldsIndexReader(Directory dir, String name, String suffix, String extensionPrefix, String codecName, byte[] id) throws IOException {
try (ChecksumIndexInput metaIn = dir.openChecksumInput(IndexFileNames.segmentFileName(name, suffix, extensionPrefix + FIELDS_META_EXTENSION_SUFFIX), IOContext.READONCE)) {
Throwable priorE = null;
try {
CodecUtil.checkIndexHeader(metaIn, codecName + "Meta", VERSION_START, VERSION_CURRENT, id, suffix);
maxDoc = metaIn.readInt();
blockShift = metaIn.readInt();
numChunks = metaIn.readInt();
docsStartPointer = metaIn.readLong();
docsMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
docsEndPointer = startPointersStartPointer = metaIn.readLong();
startPointersMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
startPointersEndPointer = metaIn.readLong();
maxPointer = metaIn.readLong();
} finally {
CodecUtil.checkFooter(metaIn, priorE);
}
}
FieldsIndexReader(Directory dir, String name, String suffix, String extension, String codecName, byte[] id, IndexInput metaIn) throws IOException {
maxDoc = metaIn.readInt();
blockShift = metaIn.readInt();
numChunks = metaIn.readInt();
docsStartPointer = metaIn.readLong();
docsMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
docsEndPointer = startPointersStartPointer = metaIn.readLong();
startPointersMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
startPointersEndPointer = metaIn.readLong();
maxPointer = metaIn.readLong();
indexInput = dir.openInput(IndexFileNames.segmentFileName(name, suffix, extensionPrefix + FIELDS_INDEX_EXTENSION_SUFFIX), IOContext.READ);
indexInput = dir.openInput(IndexFileNames.segmentFileName(name, suffix, extension), IOContext.READ);
boolean success = false;
try {
CodecUtil.checkIndexHeader(indexInput, codecName + "Idx", VERSION_START, VERSION_CURRENT, id, suffix);
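The refactor above changes who owns the meta stream: FieldsIndexReader used to open and fully read a meta file of its own, but now consumes its section from the metaIn handle passed in by the caller, so the whole meta file needs only one header, one footer, and one checksum pass. The DirectMonotonic structures it holds are loaded in two steps; a small sketch of that pattern, assuming DirectMonotonicReader's public API:

// Step 1: load the compact metadata (block offsets, averages) from the meta stream.
DirectMonotonicReader.Meta docsMeta =
    DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
// Step 2: bind it to a random-access slice of the index (.fdx/.tvx) file.
RandomAccessInput docsSlice =
    indexInput.randomAccessSlice(docsStartPointer, docsEndPointer - docsStartPointer);
DirectMonotonicReader docs = DirectMonotonicReader.getInstance(docsMeta, docsSlice);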

lucene/core/src/java/org/apache/lucene/codecs/compressing/FieldsIndexWriter.java

@@ -46,12 +46,6 @@ import org.apache.lucene.util.packed.DirectMonotonicWriter;
*/
public final class FieldsIndexWriter implements Closeable {
/** Extension of stored fields index file. */
public static final String FIELDS_INDEX_EXTENSION_SUFFIX = "x";
/** Extension of stored fields meta file. */
public static final String FIELDS_META_EXTENSION_SUFFIX = "m";
static final int VERSION_START = 0;
static final int VERSION_CURRENT = 0;
@@ -102,7 +96,7 @@ public final class FieldsIndexWriter implements Closeable {
totalChunks++;
}
void finish(int numDocs, long maxPointer) throws IOException {
void finish(int numDocs, long maxPointer, IndexOutput metaOut) throws IOException {
if (numDocs != totalDocs) {
throw new IllegalStateException("Expected " + numDocs + " docs, but got " + totalDocs);
}
@@ -110,10 +104,7 @@ public final class FieldsIndexWriter implements Closeable {
CodecUtil.writeFooter(filePointersOut);
IOUtils.close(docsOut, filePointersOut);
try (IndexOutput metaOut = dir.createOutput(IndexFileNames.segmentFileName(name, suffix, extension + FIELDS_META_EXTENSION_SUFFIX), ioContext);
IndexOutput dataOut = dir.createOutput(IndexFileNames.segmentFileName(name, suffix, extension + FIELDS_INDEX_EXTENSION_SUFFIX), ioContext)) {
CodecUtil.writeIndexHeader(metaOut, codecName + "Meta", VERSION_CURRENT, id, suffix);
try (IndexOutput dataOut = dir.createOutput(IndexFileNames.segmentFileName(name, suffix, extension), ioContext)) {
CodecUtil.writeIndexHeader(dataOut, codecName + "Idx", VERSION_CURRENT, id, suffix);
metaOut.writeInt(numDocs);
@@ -173,7 +164,6 @@ public final class FieldsIndexWriter implements Closeable {
metaOut.writeLong(dataOut.getFilePointer());
metaOut.writeLong(maxPointer);
CodecUtil.writeFooter(metaOut);
CodecUtil.writeFooter(dataOut);
}
}
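On the write side the ownership flips symmetrically: finish() no longer creates a meta output or writes a footer of its own; it appends its section to the metaOut handed in by the caller, which writes the single footer at the very end. Reconstructed from the writes kept in this hunk plus the fields FieldsIndexReader reads back, the appended section looks roughly like this (a sketch, not verbatim source):

metaOut.writeInt(numDocs);            // maxDoc on the read side
metaOut.writeInt(blockShift);         // DirectMonotonic block shift
metaOut.writeInt(numChunks);
metaOut.writeLong(docsStartPointer);          // where per-chunk doc counts start in the index file
// ... DirectMonotonicWriter metadata for per-chunk doc counts ...
metaOut.writeLong(startPointersStartPointer); // doubles as docsEndPointer
// ... DirectMonotonicWriter metadata for per-chunk start pointers ...
metaOut.writeLong(startPointersEndPointer);
metaOut.writeLong(maxPointer);        // end of the data file; no footer here, the caller owns it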

lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java

@@ -1414,7 +1414,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
if (file.lastIndexOf('.') < 0
// don't count stored fields and term vectors in, or any temporary files they might
|| !Arrays.asList("fdt", "tvd", "tmp").contains(file.substring(file.lastIndexOf('.') + 1))) {
|| !Arrays.asList("fdm", "fdt", "tvm", "tvd", "tmp").contains(file.substring(file.lastIndexOf('.') + 1))) {
++computedExtraFileCount;
}
}