mirror of https://github.com/apache/lucene.git
cleanup: don't track flushedFiles when we can just compute that from the SegmentInfo
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1055435 13f79535-47bb-0310-9956-ffa450edef68
parent 87274d00ac
commit 568785de21
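Every hunk below applies the same pattern: instead of each writer registering the files it flushes in a shared SegmentWriteState.flushedFiles set, callers derive the segment's file set from the SegmentInfo itself. A minimal sketch of the shape of the change (a hypothetical condensed helper, not the actual Lucene sources):

    import java.io.IOException;
    import java.util.Collection;
    import org.apache.lucene.index.SegmentInfo;

    // Hypothetical helper, only to show the shape of the cleanup:
    final class FlushedFilesSketch {
      static Collection<String> flushedFiles(SegmentInfo info) throws IOException {
        // Before this commit, each writer ran state.flushedFiles.add(fileName)
        // for every output it created; now the list is computed on demand.
        return info.files();
      }
    }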
DocFieldProcessor.java
@@ -64,7 +64,6 @@ final class DocFieldProcessor extends DocConsumer {
     // FieldInfo.storePayload.
     final String fileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELD_INFOS_EXTENSION);
     fieldInfos.write(state.directory, fileName);
-    state.flushedFiles.add(fileName);
   }

   @Override
DocumentsWriter.java
@@ -599,7 +599,7 @@ final class DocumentsWriter {

       if (infoStream != null) {
         message("new segment has " + (flushState.hasVectors ? "vectors" : "no vectors"));
-        message("flushedFiles=" + flushState.flushedFiles);
+        message("flushedFiles=" + newSegment.files());
         message("flushed codecs=" + newSegment.getSegmentCodecs());
       }

@@ -611,12 +611,11 @@ final class DocumentsWriter {
       }

       CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, cfsFileName);
-      for(String fileName : flushState.flushedFiles) {
+      for(String fileName : newSegment.files()) {
         cfsWriter.addFile(fileName);
       }
       cfsWriter.close();
-      deleter.deleteNewFiles(flushState.flushedFiles);
+      deleter.deleteNewFiles(newSegment.files());

       newSegment.setUseCompoundFile(true);
     }

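One subtlety in the hunk above: both the copy loop and the deletion call newSegment.files() before setUseCompoundFile(true) runs. The flag changes what files() reports, so the order is load-bearing. Annotated version of the flow (assumes the surrounding DocumentsWriter fields directory, deleter, cfsFileName, newSegment from the hunk):

    CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, cfsFileName);
    for (String fileName : newSegment.files()) {   // flag still false: the loose files
      cfsWriter.addFile(fileName);
    }
    cfsWriter.close();                             // the .cfs is now on disk
    deleter.deleteNewFiles(newSegment.files());    // same loose list: safe to delete
    newSegment.setUseCompoundFile(true);           // from here, files() means the .cfs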
IndexWriter.java
@@ -2256,11 +2256,11 @@ public class IndexWriter implements Closeable {
     // Now create the compound file if needed
     if (useCompoundFile) {
       merger.createCompoundFile(mergedName + ".cfs", info);
-      info.setUseCompoundFile(true);

       // delete new non cfs files directly: they were never
       // registered with IFD
-      deleter.deleteNewFiles(merger.getMergedFiles(info));
+      deleter.deleteNewFiles(info.files());
+      info.setUseCompoundFile(true);
     }

     // Register the new segment
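The reordering here is the point of computing instead of tracking: info.files() answers from the segment's current state, so setUseCompoundFile(true) must move below the deleteNewFiles call; flipping the flag first would make files() name the .cfs rather than the loose per-extension files that need deleting. A sketch of the behavior this hunk relies on (an assumption spelled out, not quoted from SegmentInfo):

    // Assumed SegmentInfo behavior, as relied on by this hunk:
    //   flag off -> files() lists the individual segment files (.fnm, .frq, ...)
    //   flag on  -> files() lists the single compound file (.cfs)
    merger.createCompoundFile(mergedName + ".cfs", info);
    deleter.deleteNewFiles(info.files()); // still the non-cfs files
    info.setUseCompoundFile(true);        // only now switch what files() reports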
@@ -3159,7 +3159,7 @@ public class IndexWriter implements Closeable {

         synchronized(this) {
           deleter.deleteFile(compoundFileName);
-          deleter.deleteNewFiles(merger.getMergedFiles(merge.info));
+          deleter.deleteNewFiles(merge.info.files());
         }
       }
     }
@@ -3170,7 +3170,7 @@ public class IndexWriter implements Closeable {

       // delete new non cfs files directly: they were never
       // registered with IFD
-      deleter.deleteNewFiles(merger.getMergedFiles(merge.info));
+      deleter.deleteNewFiles(merge.info.files());

       if (merge.isAborted()) {
         if (infoStream != null) {
NormsWriter.java
@@ -89,7 +89,6 @@ final class NormsWriter extends InvertedDocEndConsumer {
     }

     final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.NORMS_EXTENSION);
-    state.flushedFiles.add(normsFileName);
     IndexOutput normsOut = state.directory.createOutput(normsFileName);

     try {
SegmentMerger.java
@@ -20,8 +20,6 @@ package org.apache.lucene.index;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Set;
-import java.util.HashSet;
 import java.util.List;

 import org.apache.lucene.document.Document;
@@ -127,41 +125,11 @@ final class SegmentMerger {
     return mergedDocs;
   }

-  final Collection<String> getMergedFiles(final SegmentInfo info) throws IOException {
-    Set<String> fileSet = new HashSet<String>();
-
-    // Basic files
-    for (String ext : IndexFileNames.COMPOUND_EXTENSIONS_NOT_CODEC) {
-      fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
-    }
-
-    segmentWriteState.segmentCodecs.files(directory, info, fileSet);
-
-    // Fieldable norm files
-    int numFIs = fieldInfos.size();
-    for (int i = 0; i < numFIs; i++) {
-      FieldInfo fi = fieldInfos.fieldInfo(i);
-      if (fi.isIndexed && !fi.omitNorms) {
-        fileSet.add(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
-        break;
-      }
-    }
-
-    // Vector files
-    if (fieldInfos.hasVectors()) {
-      for (String ext : IndexFileNames.VECTOR_EXTENSIONS) {
-        fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
-      }
-    }
-
-    return fileSet;
-  }
-
   final Collection<String> createCompoundFile(String fileName, final SegmentInfo info)
           throws IOException {

     // Now merge all added files
-    Collection<String> files = getMergedFiles(info);
+    Collection<String> files = info.files();
     CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);
     for (String file : files) {
       cfsWriter.addFile(file);
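The deleted getMergedFiles duplicated, per merge, the knowledge of which files a segment owns: the basic compound extensions, the codec-specific files, one norms file if any indexed field keeps norms, and the vector files when term vectors are present. SegmentInfo.files() already has to answer that question from segment metadata, so the merger can delegate. A hypothetical stand-alone illustration of the decision being centralized (simplified extensions, not the SegmentInfo source):

    import java.util.HashSet;
    import java.util.Set;

    final class SegmentFilesSketch {
      static Set<String> files(String seg, boolean useCfs, boolean hasNorms, boolean hasVectors) {
        Set<String> out = new HashSet<String>();
        if (useCfs) {
          out.add(seg + ".cfs");                          // everything packed in one file
          return out;
        }
        for (String ext : new String[] {"fnm", "fdt", "fdx"}) {
          out.add(seg + "." + ext);                       // basic per-extension files
        }
        if (hasNorms)   out.add(seg + ".nrm");            // norms file, at most once
        if (hasVectors) {
          for (String ext : new String[] {"tvx", "tvd", "tvf"}) {
            out.add(seg + "." + ext);                     // term-vector files
          }
        }
        return out;                                       // codec-owned files omitted here
      }
    }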
SegmentWriteState.java
@@ -18,8 +18,6 @@ package org.apache.lucene.index;
  */

 import java.io.PrintStream;
-import java.util.Collection;
-import java.util.HashSet;

 import org.apache.lucene.store.Directory;

@@ -33,7 +31,6 @@ public class SegmentWriteState {
   public final FieldInfos fieldInfos;
   public final int numDocs;
   public boolean hasVectors;
-  public final Collection<String> flushedFiles;

   final SegmentCodecs segmentCodecs;
   public final String codecId;
@@ -68,7 +65,6 @@ public class SegmentWriteState {
     this.numDocs = numDocs;
     this.termIndexInterval = termIndexInterval;
     this.segmentCodecs = segmentCodecs;
-    flushedFiles = new HashSet<String>();
     codecId = "";
   }

@@ -83,7 +79,6 @@ public class SegmentWriteState {
     numDocs = state.numDocs;
     termIndexInterval = state.termIndexInterval;
     segmentCodecs = state.segmentCodecs;
-    flushedFiles = state.flushedFiles;
     this.codecId = codecId;
   }
 }
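With the field gone, SegmentWriteState is back to carrying only inputs; in particular the per-codec copying constructor no longer aliases one mutable set across every codec's state (the old flushedFiles = state.flushedFiles shared a single HashSet). A condensed hypothetical mirror of the trimmed shape (the real class has more fields):

    final class WriteStateSketch {
      final int numDocs;
      final int termIndexInterval;
      final String codecId;

      WriteStateSketch(int numDocs, int termIndexInterval) {
        this.numDocs = numDocs;
        this.termIndexInterval = termIndexInterval;
        this.codecId = "";
      }

      // Per-codec copy: only immutable inputs cross over now; there is no
      // shared flushedFiles collection left to alias.
      WriteStateSketch(WriteStateSketch base, String codecId) {
        this.numDocs = base.numDocs;
        this.termIndexInterval = base.termIndexInterval;
        this.codecId = codecId;
      }
    }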
StoredFieldsWriter.java
@@ -53,11 +53,7 @@ final class StoredFieldsWriter {
       fieldsWriter = null;
       lastDocID = 0;

-      String fieldsName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELDS_EXTENSION);
       String fieldsIdxName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
-      state.flushedFiles.add(fieldsName);
-      state.flushedFiles.add(fieldsIdxName);
-
       if (4 + ((long) state.numDocs) * 8 != state.directory.fileLength(fieldsIdxName)) {
         throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(fieldsIdxName) + " length in bytes of " + fieldsIdxName + " file exists?=" + state.directory.fileExists(fieldsIdxName));
       }
TermVectorsTermsWriter.java
@@ -59,17 +59,10 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
       tvx = tvd = tvf = null;
       assert state.segmentName != null;
       String idxName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
-      String fldName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
-      String docName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
-
       if (4 + ((long) state.numDocs) * 16 != state.directory.fileLength(idxName)) {
         throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
       }

-      state.flushedFiles.add(idxName);
-      state.flushedFiles.add(fldName);
-      state.flushedFiles.add(docName);
-
       lastDocID = 0;
       state.hasVectors = hasVectors;
       hasVectors = false;
FixedGapTermsIndexWriter.java
@@ -57,7 +57,6 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {

   public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException {
     final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
-    state.flushedFiles.add(indexFileName);
     termIndexInterval = state.termIndexInterval;
     out = state.directory.createOutput(indexFileName);
     fieldInfos = state.fieldInfos;
PrefixCodedTermsWriter.java
@@ -74,7 +74,6 @@ public class PrefixCodedTermsWriter extends FieldsConsumer {
     this.termComp = termComp;
     out = state.directory.createOutput(termsFileName);
     termsIndexWriter.setTermsOutput(out);
-    state.flushedFiles.add(termsFileName);

     fieldInfos = state.fieldInfos;
     writeHeader(out);
VariableGapTermsIndexWriter.java
@@ -145,7 +145,6 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {

   public VariableGapTermsIndexWriter(SegmentWriteState state, IndexTermSelector policy) throws IOException {
     final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
-    state.flushedFiles.add(indexFileName);
     out = state.directory.createOutput(indexFileName);
     fieldInfos = state.fieldInfos;
     this.policy = policy;
SepPostingsWriterImpl.java
@@ -85,24 +85,20 @@ public final class SepPostingsWriterImpl extends PostingsWriterBase {
     super();

     final String docFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, DOC_EXTENSION);
-    state.flushedFiles.add(docFileName);
     docOut = factory.createOutput(state.directory, docFileName);
     docIndex = docOut.index();

     if (state.fieldInfos.hasProx()) {
       final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, FREQ_EXTENSION);
-      state.flushedFiles.add(frqFileName);
       freqOut = factory.createOutput(state.directory, frqFileName);
       freqIndex = freqOut.index();

       final String posFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, POS_EXTENSION);
       posOut = factory.createOutput(state.directory, posFileName);
-      state.flushedFiles.add(posFileName);
       posIndex = posOut.index();

       // TODO: -- only if at least one field stores payloads?
       final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, PAYLOAD_EXTENSION);
-      state.flushedFiles.add(payloadFileName);
       payloadOut = state.directory.createOutput(payloadFileName);

     } else {
@@ -114,7 +110,6 @@ public final class SepPostingsWriterImpl extends PostingsWriterBase {
     }

     final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, SKIP_EXTENSION);
-    state.flushedFiles.add(skipFileName);
     skipOut = state.directory.createOutput(skipFileName);

     totalNumDocs = state.numDocs;
SimpleTextFieldsWriter.java
@@ -46,7 +46,6 @@ class SimpleTextFieldsWriter extends FieldsConsumer {
   public SimpleTextFieldsWriter(SegmentWriteState state) throws IOException {
     final String fileName = SimpleTextCodec.getPostingsFileName(state.segmentName, state.codecId);
     out = state.directory.createOutput(fileName);
-    state.flushedFiles.add(fileName);
   }

   private void write(String s) throws IOException {
StandardPostingsWriter.java
@@ -61,14 +61,12 @@ public final class StandardPostingsWriter extends PostingsWriterBase {
   public StandardPostingsWriter(SegmentWriteState state) throws IOException {
     super();
     String fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.FREQ_EXTENSION);
-    state.flushedFiles.add(fileName);
     freqOut = state.directory.createOutput(fileName);

     if (state.fieldInfos.hasProx()) {
       // At least one field does not omit TF, so create the
       // prox file
       fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.PROX_EXTENSION);
-      state.flushedFiles.add(fileName);
       proxOut = state.directory.createOutput(fileName);
     } else {
       // Every field omits TF so we will write no prox file
TestDoc.java
@@ -201,11 +201,12 @@ public class TestDoc extends LuceneTestCase {
       r2.close();

       final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
-                                               useCompoundFile, merger.fieldInfos().hasProx(), merger.getSegmentCodecs(),
+                                               false, merger.fieldInfos().hasProx(), merger.getSegmentCodecs(),
                                                merger.fieldInfos().hasVectors());

       if (useCompoundFile) {
         Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);
+        info.setUseCompoundFile(true);
         for (final String fileToDelete : filesToDelete)
           si1.dir.deleteFile(fileToDelete);
       }

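The test change mirrors the IndexWriter ordering fix: the SegmentInfo is now constructed with false for the compound-file flag, and the flag is flipped only after createCompoundFile has walked the loose-file list, so any file enumeration done during the merge still sees the pre-cfs files. Annotated excerpt (assumes the test's merger, info, merged, and si1 from the hunk):

    Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);
    info.setUseCompoundFile(true);        // safe now: the .cfs exists on disk
    for (final String fileToDelete : filesToDelete)
      si1.dir.deleteFile(fileToDelete);   // drop the loose files it packed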
MockRandomCodec.java
@@ -78,7 +78,6 @@ public class MockRandomCodec extends Codec {
     final IndexOutput out = state.directory.createOutput(seedFileName);
     out.writeLong(seed);
     out.close();
-    state.flushedFiles.add(seedFileName);

     final Random random = new Random(seed);
     PostingsWriterBase postingsWriter;
PreFlexFieldsWriter.java
@@ -46,18 +46,14 @@ class PreFlexFieldsWriter extends FieldsConsumer {
                                      state.segmentName,
                                      state.fieldInfos,
                                      state.termIndexInterval);
-    state.flushedFiles.add(IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.TERMS_EXTENSION));
-    state.flushedFiles.add(IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.TERMS_INDEX_EXTENSION));

     final String freqFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.FREQ_EXTENSION);
     freqOut = state.directory.createOutput(freqFile);
-    state.flushedFiles.add(freqFile);
     totalNumDocs = state.numDocs;

     if (state.fieldInfos.hasProx()) {
       final String proxFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.PROX_EXTENSION);
       proxOut = state.directory.createOutput(proxFile);
-      state.flushedFiles.add(proxFile);
     } else {
       proxOut = null;
     }