cleanup: don't track flushedFiles when we can just compute that from the SegmentInfo

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1055435 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2011-01-05 11:44:29 +00:00
parent 87274d00ac
commit 568785de21
17 changed files with 10 additions and 76 deletions
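Background for the diffs below: every flush/merge consumer used to register each file it wrote in a shared SegmentWriteState.flushedFiles set, and over a dozen call sites had to remember to do so. A segment's file set, though, is fully determined by metadata the segment already carries (its codec, norms, vectors, and compound-file flag), so SegmentInfo.files() can compute it on demand. A minimal, self-contained sketch of that idea, with simplified metadata and illustrative file extensions (this is not Lucene's actual SegmentInfo):

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative stand-in for SegmentInfo: the file list is a pure function
    // of segment metadata, so no writer has to track what it flushed.
    class SegmentInfoSketch {
      final String name;        // segment name, e.g. "_0"
      final boolean hasVectors; // were term-vector files written?
      boolean useCompoundFile;  // are all files packed into a single .cfs?

      SegmentInfoSketch(String name, boolean hasVectors, boolean useCompoundFile) {
        this.name = name;
        this.hasVectors = hasVectors;
        this.useCompoundFile = useCompoundFile;
      }

      // Computed on demand; this replaces the hand-maintained flushedFiles set.
      List<String> files() {
        List<String> files = new ArrayList<String>();
        if (useCompoundFile) {
          files.add(name + ".cfs"); // the compound file subsumes the rest
          return files;
        }
        files.add(name + ".fnm"); // field infos
        files.add(name + ".fdt"); // stored fields data
        files.add(name + ".fdx"); // stored fields index
        if (hasVectors) {
          files.add(name + ".tvx");
          files.add(name + ".tvd");
          files.add(name + ".tvf");
        }
        return files;
      }
    }

With the set computed this way, the state.flushedFiles.add(...) calls removed below become redundant, and call sites simply ask the segment, e.g. deleter.deleteNewFiles(newSegment.files()).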

DocFieldProcessor.java

@@ -64,7 +64,6 @@ final class DocFieldProcessor extends DocConsumer {
// FieldInfo.storePayload.
final String fileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELD_INFOS_EXTENSION);
fieldInfos.write(state.directory, fileName);
- state.flushedFiles.add(fileName);
}
@Override

DocumentsWriter.java

@@ -599,7 +599,7 @@ final class DocumentsWriter {
if (infoStream != null) {
message("new segment has " + (flushState.hasVectors ? "vectors" : "no vectors"));
- message("flushedFiles=" + flushState.flushedFiles);
+ message("flushedFiles=" + newSegment.files());
message("flushed codecs=" + newSegment.getSegmentCodecs());
}
@@ -611,12 +611,11 @@
}
CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, cfsFileName);
- for(String fileName : flushState.flushedFiles) {
+ for(String fileName : newSegment.files()) {
cfsWriter.addFile(fileName);
}
cfsWriter.close();
- deleter.deleteNewFiles(flushState.flushedFiles);
+ deleter.deleteNewFiles(newSegment.files());
newSegment.setUseCompoundFile(true);
}

IndexWriter.java

@@ -2256,11 +2256,11 @@ public class IndexWriter implements Closeable {
// Now create the compound file if needed
if (useCompoundFile) {
merger.createCompoundFile(mergedName + ".cfs", info);
- info.setUseCompoundFile(true);
// delete new non cfs files directly: they were never
// registered with IFD
- deleter.deleteNewFiles(merger.getMergedFiles(info));
+ deleter.deleteNewFiles(info.files());
+ info.setUseCompoundFile(true);
}
// Register the new segment
@@ -3159,7 +3159,7 @@ public class IndexWriter implements Closeable {
synchronized(this) {
deleter.deleteFile(compoundFileName);
- deleter.deleteNewFiles(merger.getMergedFiles(merge.info));
+ deleter.deleteNewFiles(merge.info.files());
}
}
}
@@ -3170,7 +3170,7 @@
// delete new non cfs files directly: they were never
// registered with IFD
- deleter.deleteNewFiles(merger.getMergedFiles(merge.info));
+ deleter.deleteNewFiles(merge.info.files());
if (merge.isAborted()) {
if (infoStream != null) {
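One subtlety in the hunks above: info.files() is computed from the segment's current state, so ordering now matters. deleter.deleteNewFiles(info.files()) has to run while the compound-file flag is still false; once setUseCompoundFile(true) is called, files() reports the .cfs itself rather than the per-extension files that need deleting. That is why setUseCompoundFile(true) moved below the delete, and the TestDoc change further down follows the same rule (the SegmentInfo is created with compound=false and the flag is flipped only after the compound file is built). Schematically, with explanatory comments added (the comments are not part of the patch):

    merger.createCompoundFile(mergedName + ".cfs", info);
    deleter.deleteNewFiles(info.files()); // still lists the raw per-extension files
    info.setUseCompoundFile(true);        // from here on, files() returns the .cfs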

NormsWriter.java

@@ -89,7 +89,6 @@ final class NormsWriter extends InvertedDocEndConsumer {
}
final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.NORMS_EXTENSION);
- state.flushedFiles.add(normsFileName);
IndexOutput normsOut = state.directory.createOutput(normsFileName);
try {

SegmentMerger.java

@@ -20,8 +20,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
- import java.util.Set;
- import java.util.HashSet;
import java.util.List;
import org.apache.lucene.document.Document;
@@ -127,41 +125,11 @@ final class SegmentMerger {
return mergedDocs;
}
- final Collection<String> getMergedFiles(final SegmentInfo info) throws IOException {
- Set<String> fileSet = new HashSet<String>();
- // Basic files
- for (String ext : IndexFileNames.COMPOUND_EXTENSIONS_NOT_CODEC) {
- fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
- }
- segmentWriteState.segmentCodecs.files(directory, info, fileSet);
- // Fieldable norm files
- int numFIs = fieldInfos.size();
- for (int i = 0; i < numFIs; i++) {
- FieldInfo fi = fieldInfos.fieldInfo(i);
- if (fi.isIndexed && !fi.omitNorms) {
- fileSet.add(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
- break;
- }
- }
- // Vector files
- if (fieldInfos.hasVectors()) {
- for (String ext : IndexFileNames.VECTOR_EXTENSIONS) {
- fileSet.add(IndexFileNames.segmentFileName(segment, "", ext));
- }
- }
- return fileSet;
- }
final Collection<String> createCompoundFile(String fileName, final SegmentInfo info)
throws IOException {
// Now merge all added files
- Collection<String> files = getMergedFiles(info);
+ Collection<String> files = info.files();
CompoundFileWriter cfsWriter = new CompoundFileWriter(directory, fileName, checkAbort);
for (String file : files) {
cfsWriter.addFile(file);
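The getMergedFiles() removal above is the core of the cleanup: its hand-rolled enumeration of compound extensions, codec files, norms, and vectors duplicated knowledge the segment already has, and the equivalent computation now happens centrally in SegmentInfo.files() (see the sketch after the commit header).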

SegmentWriteState.java

@@ -18,8 +18,6 @@ package org.apache.lucene.index;
*/
import java.io.PrintStream;
- import java.util.Collection;
- import java.util.HashSet;
import org.apache.lucene.store.Directory;
@@ -33,7 +31,6 @@ public class SegmentWriteState {
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
- public final Collection<String> flushedFiles;
final SegmentCodecs segmentCodecs;
public final String codecId;
@@ -68,7 +65,6 @@ public class SegmentWriteState {
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
- flushedFiles = new HashSet<String>();
codecId = "";
}
@@ -83,7 +79,6 @@ public class SegmentWriteState {
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
- flushedFiles = state.flushedFiles;
this.codecId = codecId;
}
}

StoredFieldsWriter.java

@@ -53,11 +53,7 @@ final class StoredFieldsWriter {
fieldsWriter = null;
lastDocID = 0;
- String fieldsName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELDS_EXTENSION);
String fieldsIdxName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
- state.flushedFiles.add(fieldsName);
- state.flushedFiles.add(fieldsIdxName);
if (4 + ((long) state.numDocs) * 8 != state.directory.fileLength(fieldsIdxName)) {
throw new RuntimeException("after flush: fdx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(fieldsIdxName) + " length in bytes of " + fieldsIdxName + " file exists?=" + state.directory.fileExists(fieldsIdxName));
}

TermVectorsTermsWriter.java

@@ -59,17 +59,10 @@ final class TermVectorsTermsWriter extends TermsHashConsumer {
tvx = tvd = tvf = null;
assert state.segmentName != null;
String idxName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
- String fldName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
- String docName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
if (4 + ((long) state.numDocs) * 16 != state.directory.fileLength(idxName)) {
throw new RuntimeException("after flush: tvx size mismatch: " + state.numDocs + " docs vs " + state.directory.fileLength(idxName) + " length in bytes of " + idxName + " file exists?=" + state.directory.fileExists(idxName));
}
- state.flushedFiles.add(idxName);
- state.flushedFiles.add(fldName);
- state.flushedFiles.add(docName);
lastDocID = 0;
state.hasVectors = hasVectors;
hasVectors = false;

FixedGapTermsIndexWriter.java

@@ -57,7 +57,6 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException {
final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
- state.flushedFiles.add(indexFileName);
termIndexInterval = state.termIndexInterval;
out = state.directory.createOutput(indexFileName);
fieldInfos = state.fieldInfos;

PrefixCodedTermsWriter.java

@@ -74,7 +74,6 @@ public class PrefixCodedTermsWriter extends FieldsConsumer {
this.termComp = termComp;
out = state.directory.createOutput(termsFileName);
termsIndexWriter.setTermsOutput(out);
- state.flushedFiles.add(termsFileName);
fieldInfos = state.fieldInfos;
writeHeader(out);

VariableGapTermsIndexWriter.java

@@ -145,7 +145,6 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
public VariableGapTermsIndexWriter(SegmentWriteState state, IndexTermSelector policy) throws IOException {
final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
- state.flushedFiles.add(indexFileName);
out = state.directory.createOutput(indexFileName);
fieldInfos = state.fieldInfos;
this.policy = policy;

SepPostingsWriterImpl.java

@@ -85,24 +85,20 @@ public final class SepPostingsWriterImpl extends PostingsWriterBase {
super();
final String docFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, DOC_EXTENSION);
- state.flushedFiles.add(docFileName);
docOut = factory.createOutput(state.directory, docFileName);
docIndex = docOut.index();
if (state.fieldInfos.hasProx()) {
final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, FREQ_EXTENSION);
- state.flushedFiles.add(frqFileName);
freqOut = factory.createOutput(state.directory, frqFileName);
freqIndex = freqOut.index();
final String posFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, POS_EXTENSION);
posOut = factory.createOutput(state.directory, posFileName);
- state.flushedFiles.add(posFileName);
posIndex = posOut.index();
// TODO: -- only if at least one field stores payloads?
final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, PAYLOAD_EXTENSION);
- state.flushedFiles.add(payloadFileName);
payloadOut = state.directory.createOutput(payloadFileName);
} else {
@@ -114,7 +110,6 @@ public final class SepPostingsWriterImpl extends PostingsWriterBase {
}
final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, SKIP_EXTENSION);
- state.flushedFiles.add(skipFileName);
skipOut = state.directory.createOutput(skipFileName);
totalNumDocs = state.numDocs;

SimpleTextFieldsWriter.java

@@ -46,7 +46,6 @@ class SimpleTextFieldsWriter extends FieldsConsumer {
public SimpleTextFieldsWriter(SegmentWriteState state) throws IOException {
final String fileName = SimpleTextCodec.getPostingsFileName(state.segmentName, state.codecId);
out = state.directory.createOutput(fileName);
- state.flushedFiles.add(fileName);
}
private void write(String s) throws IOException {
private void write(String s) throws IOException {

StandardPostingsWriter.java

@@ -61,14 +61,12 @@ public final class StandardPostingsWriter extends PostingsWriterBase {
public StandardPostingsWriter(SegmentWriteState state) throws IOException {
super();
String fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.FREQ_EXTENSION);
- state.flushedFiles.add(fileName);
freqOut = state.directory.createOutput(fileName);
if (state.fieldInfos.hasProx()) {
// At least one field does not omit TF, so create the
// prox file
fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.PROX_EXTENSION);
- state.flushedFiles.add(fileName);
proxOut = state.directory.createOutput(fileName);
} else {
// Every field omits TF so we will write no prox file

TestDoc.java

@@ -201,11 +201,12 @@ public class TestDoc extends LuceneTestCase {
r2.close();
final SegmentInfo info = new SegmentInfo(merged, si1.docCount + si2.docCount, si1.dir,
- useCompoundFile, merger.fieldInfos().hasProx(), merger.getSegmentCodecs(),
+ false, merger.fieldInfos().hasProx(), merger.getSegmentCodecs(),
merger.fieldInfos().hasVectors());
if (useCompoundFile) {
Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);
+ info.setUseCompoundFile(true);
for (final String fileToDelete : filesToDelete)
si1.dir.deleteFile(fileToDelete);
}

MockRandomCodec.java

@@ -78,7 +78,6 @@ public class MockRandomCodec extends Codec {
final IndexOutput out = state.directory.createOutput(seedFileName);
out.writeLong(seed);
out.close();
- state.flushedFiles.add(seedFileName);
final Random random = new Random(seed);
PostingsWriterBase postingsWriter;

PreFlexFieldsWriter.java

@@ -46,18 +46,14 @@ class PreFlexFieldsWriter extends FieldsConsumer {
state.segmentName,
state.fieldInfos,
state.termIndexInterval);
- state.flushedFiles.add(IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.TERMS_EXTENSION));
- state.flushedFiles.add(IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.TERMS_INDEX_EXTENSION));
final String freqFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.FREQ_EXTENSION);
freqOut = state.directory.createOutput(freqFile);
- state.flushedFiles.add(freqFile);
totalNumDocs = state.numDocs;
if (state.fieldInfos.hasProx()) {
final String proxFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.PROX_EXTENSION);
proxOut = state.directory.createOutput(proxFile);
- state.flushedFiles.add(proxFile);
} else {
proxOut = null;
}