LUCENE-3728: push compound shared doc stores reading into 3.x codec (only 3.0 indexes, only a hit if you also have vectors)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3661@1237746 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2012-01-30 16:02:37 +00:00
parent 668dea8016
commit 846338c0dc
4 changed files with 32 additions and 32 deletions

View File

@@ -29,13 +29,13 @@ import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.IOUtils;
import java.io.Closeable;
import java.nio.charset.Charset;
import java.util.Set;
/**
@@ -93,6 +93,10 @@ public final class Lucene3xStoredFieldsReader extends StoredFieldsReader impleme
// The docID offset where our docs begin in the index
// file. This will be 0 if we have our own private file.
private int docStoreOffset;
// when we are inside a compound share doc store (CFX),
// (lucene 3.0 indexes only), we privately open our own fd.
private final CompoundFileDirectory storeCFSReader;
/** Returns a cloned FieldsReader that shares open
* IndexInputs with the original one. It is the caller's
@@ -131,6 +135,7 @@ public final class Lucene3xStoredFieldsReader extends StoredFieldsReader impleme
this.docStoreOffset = docStoreOffset;
this.fieldsStream = fieldsStream;
this.indexStream = indexStream;
this.storeCFSReader = null;
}
public Lucene3xStoredFieldsReader(Directory d, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException {
@@ -140,6 +145,12 @@ public final class Lucene3xStoredFieldsReader extends StoredFieldsReader impleme
boolean success = false;
fieldInfos = fn;
try {
if (docStoreOffset != -1 && si.getDocStoreIsCompoundFile()) {
d = storeCFSReader = new CompoundFileDirectory(si.dir,
IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), context, false);
} else {
storeCFSReader = null;
}
fieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, "", FIELDS_EXTENSION), context);
final String indexStreamFN = IndexFileNames.segmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
indexStream = d.openInput(indexStreamFN, context);
@@ -200,7 +211,7 @@ public final class Lucene3xStoredFieldsReader extends StoredFieldsReader impleme
*/
public final void close() throws IOException {
if (!closed) {
IOUtils.close(fieldsStream, indexStream);
IOUtils.close(fieldsStream, indexStream, storeCFSReader);
closed = true;
}
}

View File

@@ -38,6 +38,7 @@ import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
@@ -90,6 +91,12 @@ public class Lucene3xTermVectorsReader extends TermVectorsReader {
// file. This will be 0 if we have our own private file.
private int docStoreOffset;
// when we are inside a compound share doc store (CFX),
// (lucene 3.0 indexes only), we privately open our own fd.
// TODO: if we are worried, maybe we could eliminate the
// extra fd somehow when you also have vectors...
private final CompoundFileDirectory storeCFSReader;
private final int format;
// used by clone
@@ -102,6 +109,7 @@ public class Lucene3xTermVectorsReader extends TermVectorsReader {
this.numTotalDocs = numTotalDocs;
this.docStoreOffset = docStoreOffset;
this.format = format;
this.storeCFSReader = null;
}
public Lucene3xTermVectorsReader(Directory d, SegmentInfo si, FieldInfos fieldInfos, IOContext context)
@@ -113,6 +121,12 @@ public class Lucene3xTermVectorsReader extends TermVectorsReader {
boolean success = false;
try {
if (docStoreOffset != -1 && si.getDocStoreIsCompoundFile()) {
d = storeCFSReader = new CompoundFileDirectory(si.dir,
IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), context, false);
} else {
storeCFSReader = null;
}
String idxName = IndexFileNames.segmentFileName(segment, "", VECTORS_INDEX_EXTENSION);
tvx = d.openInput(idxName, context);
format = checkValidFormat(tvx);
@@ -170,7 +184,7 @@ public class Lucene3xTermVectorsReader extends TermVectorsReader {
}
public void close() throws IOException {
IOUtils.close(tvx, tvd, tvf);
IOUtils.close(tvx, tvd, tvf, storeCFSReader);
}
/**

View File

@@ -23,7 +23,6 @@ import java.util.Map;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.SegmentInfosReader;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexInput;

View File

@@ -65,7 +65,6 @@ final class SegmentCoreReaders {
final StoredFieldsReader fieldsReaderOrig;
final TermVectorsReader termVectorsReaderOrig;
final CompoundFileDirectory cfsReader;
final CompoundFileDirectory storeCFSReader;
final CloseableThreadLocal<StoredFieldsReader> fieldsReaderLocal = new CloseableThreadLocal<StoredFieldsReader>() {
@Override
@@ -121,34 +120,11 @@ final class SegmentCoreReaders {
// kinda jaky to assume the codec handles the case of no norms file at all gracefully?!
norms = codec.normsFormat().docsProducer(segmentReadState);
perDocProducer = codec.docValuesFormat().docsProducer(segmentReadState);
final Directory storeDir;
if (si.getDocStoreOffset() != -1) {
if (si.getDocStoreIsCompoundFile()) {
storeCFSReader = new CompoundFileDirectory(dir,
IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
context, false);
storeDir = storeCFSReader;
assert storeDir != null;
} else {
storeCFSReader = null;
storeDir = dir;
assert storeDir != null;
}
} else if (si.getUseCompoundFile()) {
storeDir = cfsReader;
storeCFSReader = null;
assert storeDir != null;
} else {
storeDir = dir;
storeCFSReader = null;
assert storeDir != null;
}
fieldsReaderOrig = si.getCodec().storedFieldsFormat().fieldsReader(storeDir, si, fieldInfos, context);
fieldsReaderOrig = si.getCodec().storedFieldsFormat().fieldsReader(cfsDir, si, fieldInfos, context);
if (si.getHasVectors()) { // open term vector files only as needed
termVectorsReaderOrig = si.getCodec().termVectorsFormat().vectorsReader(storeDir, si, fieldInfos, context);
termVectorsReaderOrig = si.getCodec().termVectorsFormat().vectorsReader(cfsDir, si, fieldInfos, context);
} else {
termVectorsReaderOrig = null;
}
@@ -175,7 +151,7 @@ final class SegmentCoreReaders {
//System.out.println("core.decRef seg=" + owner.getSegmentInfo() + " rc=" + ref);
if (ref.decrementAndGet() == 0) {
IOUtils.close(termVectorsLocal, fieldsReaderLocal, fields, perDocProducer,
termVectorsReaderOrig, fieldsReaderOrig, cfsReader, storeCFSReader, norms);
termVectorsReaderOrig, fieldsReaderOrig, cfsReader, norms);
notifyCoreClosedListeners();
}
}