restructuring of IndexReader, SegmentReader, and MultiReader in order to improve directory locking
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@150285 13f79535-47bb-0310-9956-ffa450edef68
Christoph Goller 2004-04-16 09:17:05 +00:00
parent 9e8542e4d2
commit e0ea2909c3
7 changed files with 290 additions and 170 deletions

FilterIndexReader.java

@@ -73,61 +73,68 @@ public class FilterIndexReader extends IndexReader {
public void close() throws IOException { in.close(); }
}
protected IndexReader in;
protected IndexReader baseReader;
public FilterIndexReader(IndexReader in) {
super(in.directory());
segmentInfos = in.segmentInfos;
this.in = in;
/**
* <p>Construct a FilterIndexReader based on the specified base reader.
* Directory locking for delete, undeleteAll, and setNorm operations is
* left to the base reader.</p>
* <p>Note that the base reader is closed if this FilterIndexReader is closed.</p>
* @param baseReader the specified base reader.
*/
public FilterIndexReader(IndexReader baseReader) {
super(baseReader.directory());
this.baseReader = baseReader;
}
public TermFreqVector[] getTermFreqVectors(int docNumber)
throws IOException {
return in.getTermFreqVectors(docNumber);
return baseReader.getTermFreqVectors(docNumber);
}
public TermFreqVector getTermFreqVector(int docNumber, String field)
throws IOException {
return in.getTermFreqVector(docNumber, field);
return baseReader.getTermFreqVector(docNumber, field);
}
public int numDocs() { return in.numDocs(); }
public int maxDoc() { return in.maxDoc(); }
public int numDocs() { return baseReader.numDocs(); }
public int maxDoc() { return baseReader.maxDoc(); }
public Document document(int n) throws IOException { return in.document(n); }
public Document document(int n) throws IOException { return baseReader.document(n); }
public boolean isDeleted(int n) { return in.isDeleted(n); }
public boolean hasDeletions() { return in.hasDeletions(); }
public void undeleteAll() throws IOException { in.undeleteAll(); }
public boolean isDeleted(int n) { return baseReader.isDeleted(n); }
public boolean hasDeletions() { return baseReader.hasDeletions(); }
protected void doUndeleteAll() throws IOException { baseReader.undeleteAll(); }
public byte[] norms(String f) throws IOException { return in.norms(f); }
public byte[] norms(String f) throws IOException { return baseReader.norms(f); }
public void norms(String f, byte[] bytes, int offset) throws IOException {
in.norms(f, bytes, offset);
baseReader.norms(f, bytes, offset);
}
public void setNorm(int d, String f, byte b) throws IOException {
in.setNorm(d, f, b);
protected void doSetNorm(int d, String f, byte b) throws IOException {
baseReader.setNorm(d, f, b);
}
public TermEnum terms() throws IOException { return in.terms(); }
public TermEnum terms(Term t) throws IOException { return in.terms(t); }
public TermEnum terms() throws IOException { return baseReader.terms(); }
public TermEnum terms(Term t) throws IOException { return baseReader.terms(t); }
public int docFreq(Term t) throws IOException { return in.docFreq(t); }
public int docFreq(Term t) throws IOException { return baseReader.docFreq(t); }
public TermDocs termDocs() throws IOException { return in.termDocs(); }
public TermDocs termDocs() throws IOException { return baseReader.termDocs(); }
public TermPositions termPositions() throws IOException {
return in.termPositions();
return baseReader.termPositions();
}
protected void doDelete(int n) throws IOException { in.doDelete(n); }
protected void doClose() throws IOException { in.doClose(); }
protected void doDelete(int n) throws IOException { baseReader.delete(n); }
protected void doCommit() throws IOException { baseReader.commit(); }
protected void doClose() throws IOException { baseReader.close(); }
public Collection getFieldNames() throws IOException {
return in.getFieldNames();
return baseReader.getFieldNames();
}
public Collection getFieldNames(boolean indexed) throws IOException {
return in.getFieldNames(indexed);
return baseReader.getFieldNames(indexed);
}
/**
@@ -137,6 +144,6 @@ public class FilterIndexReader extends IndexReader {
* @return Collection of Strings indicating the names of the fields
*/
public Collection getIndexedFieldNames(boolean storedTermVector) {
return in.getIndexedFieldNames(storedTermVector);
return baseReader.getIndexedFieldNames(storedTermVector);
}
}
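The new constructor contract above (pass the base reader to super(), let it own all directory locking) is easiest to see from a subclass. A minimal sketch using only methods that appear in this diff; the subclass name and its filtering rule are invented for illustration:

import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;

// Hypothetical filter that pretends document 0 is deleted. Locking for
// delete, undeleteAll, and setNorm is left entirely to the base reader,
// and closing this reader closes the base reader too.
public class HideFirstDocReader extends FilterIndexReader {
  public HideFirstDocReader(IndexReader baseReader) {
    super(baseReader);
  }
  public boolean isDeleted(int n) {
    return n == 0 || super.isDeleted(n);
  }
  public boolean hasDeletions() {
    return true;
  }
}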

IndexReader.java

@@ -44,31 +44,69 @@ import org.apache.lucene.search.Similarity;
@version $Id$
*/
public abstract class IndexReader {
/**
* Constructor used if IndexReader is not owner of its directory.
* This is used for IndexReaders that are used within other IndexReaders that take care of locking directories.
*
* @param directory Directory where IndexReader files reside.
*/
protected IndexReader(Directory directory) {
this.directory = directory;
stale = false;
segmentInfos = null;
directoryOwner = false;
closeDirectory = false;
stale = false;
hasChanges = false;
writeLock = null;
}
/**
* Constructor used if IndexReader is owner of its directory.
* If IndexReader is owner of its directory, it locks its directory in case of write operations.
*
* @param directory Directory where IndexReader files reside.
* @param segmentInfos Used for write-lock handling.
* @param closeDirectory Whether the directory should be closed when this reader is closed.
*/
protected IndexReader(Directory directory, SegmentInfos segmentInfos, boolean closeDirectory) {
this.directory = directory;
this.segmentInfos = segmentInfos;
directoryOwner = true;
this.closeDirectory = closeDirectory;
stale = false;
hasChanges = false;
writeLock = null;
}
private Directory directory;
final private Directory directory;
final private boolean directoryOwner;
final private SegmentInfos segmentInfos;
private Lock writeLock;
SegmentInfos segmentInfos = null;
private boolean stale = false;
private boolean stale;
private boolean hasChanges;
final private boolean closeDirectory;
/** Returns an IndexReader reading the index in an FSDirectory in the named
path. */
public static IndexReader open(String path) throws IOException {
return open(FSDirectory.getDirectory(path, false));
return open(FSDirectory.getDirectory(path, false), true);
}
/** Returns an IndexReader reading the index in an FSDirectory in the named
path. */
public static IndexReader open(File path) throws IOException {
return open(FSDirectory.getDirectory(path, false));
return open(FSDirectory.getDirectory(path, false), true);
}
/** Returns an IndexReader reading the index in the given Directory. */
public static IndexReader open(final Directory directory) throws IOException {
return open(directory, false);
}
private static IndexReader open(final Directory directory, final boolean closeDirectory) throws IOException {
synchronized (directory) { // in- & inter-process sync
return (IndexReader)new Lock.With(
directory.makeLock(IndexWriter.COMMIT_LOCK_NAME),
@@ -77,12 +115,12 @@ public abstract class IndexReader {
SegmentInfos infos = new SegmentInfos();
infos.read(directory);
if (infos.size() == 1) { // index is optimized
return new SegmentReader(infos, infos.info(0), true);
return new SegmentReader(infos, infos.info(0), closeDirectory);
} else {
IndexReader[] readers = new IndexReader[infos.size()];
for (int i = 0; i < infos.size(); i++)
readers[i] = new SegmentReader(infos, infos.info(i), i==infos.size()-1);
return new MultiReader(directory, readers);
readers[i] = new SegmentReader(infos.info(i));
return new MultiReader(directory, infos, closeDirectory, readers);
}
}
}.run();
@@ -272,7 +310,16 @@ public abstract class IndexReader {
* @see #norms(String)
* @see Similarity#decodeNorm(byte)
*/
public abstract void setNorm(int doc, String field, byte value)
public final synchronized void setNorm(int doc, String field, byte value)
throws IOException{
if(directoryOwner)
aquireWriteLock();
doSetNorm(doc, field, value);
hasChanges = true;
}
/** Implements setNorm in subclass.*/
protected abstract void doSetNorm(int doc, String field, byte value)
throws IOException;
/** Expert: Resets the normalization factor for the named field of the named
@@ -346,16 +393,15 @@
/** Returns an unpositioned {@link TermPositions} enumerator. */
public abstract TermPositions termPositions() throws IOException;
/** Deletes the document numbered <code>docNum</code>. Once a document is
deleted it will not appear in TermDocs or TermPositions enumerations.
Attempts to read its field with the {@link #document}
method will result in an error. The presence of this document may still be
reflected in the {@link #docFreq} statistic, though
this will be corrected eventually as the index is further modified.
/**
* Tries to acquire the write lock on this directory.
* This method is only valid if this IndexReader is the directory owner.
*
* @throws IOException If WriteLock cannot be acquired.
*/
public final synchronized void delete(int docNum) throws IOException {
private void aquireWriteLock() throws IOException {
if (stale)
throw new IOException("IndexReader out of date and no longer valid for deletion");
throw new IOException("IndexReader out of date and no longer valid for delete, undelete, or setNorm operations");
if (writeLock == null) {
Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
@@ -365,14 +411,27 @@ public abstract class IndexReader {
// we have to check whether index has changed since this reader was opened.
// if so, this reader is no longer valid for deletion
if (segmentInfos != null && SegmentInfos.readCurrentVersion(directory) > segmentInfos.getVersion()) {
if (SegmentInfos.readCurrentVersion(directory) > segmentInfos.getVersion()) {
stale = true;
this.writeLock.release();
this.writeLock = null;
throw new IOException("IndexReader out of date and no longer valid for deletion");
throw new IOException("IndexReader out of date and no longer valid for delete, undelete, or setNorm operations");
}
}
}
/** Deletes the document numbered <code>docNum</code>. Once a document is
deleted it will not appear in TermDocs or TermPositions enumerations.
Attempts to read its field with the {@link #document}
method will result in an error. The presence of this document may still be
reflected in the {@link #docFreq} statistic, though
this will be corrected eventually as the index is further modified.
*/
public final synchronized void delete(int docNum) throws IOException {
if(directoryOwner)
aquireWriteLock();
doDelete(docNum);
hasChanges = true;
}
/** Implements deletion of the document numbered <code>docNum</code>.
@@ -402,19 +461,58 @@
}
/** Undeletes all documents currently marked as deleted in this index.*/
public abstract void undeleteAll() throws IOException;
public final synchronized void undeleteAll() throws IOException{
if(directoryOwner)
aquireWriteLock();
doUndeleteAll();
hasChanges = true;
}
/** Implements actual undeleteAll() in subclass. */
protected abstract void doUndeleteAll() throws IOException;
/**
* Commits changes resulting from delete, undeleteAll, or setNorm operations.
*
* @throws IOException
*/
protected final synchronized void commit() throws IOException{
if(hasChanges){
if(directoryOwner){
synchronized (directory) { // in- & inter-process sync
new Lock.With(directory.makeLock(IndexWriter.COMMIT_LOCK_NAME),
IndexWriter.COMMIT_LOCK_TIMEOUT) {
public Object doBody() throws IOException {
doCommit();
segmentInfos.write(directory);
return null;
}
}.run();
}
if (writeLock != null) {
writeLock.release(); // release write lock
writeLock = null;
}
}
else
doCommit();
}
hasChanges = false;
}
/** Implements commit. */
protected abstract void doCommit() throws IOException;
/**
* Closes files associated with this index.
* Also saves any new deletions to disk.
* No other methods should be called after this has been called.
*/
public final synchronized void close() throws IOException {
commit();
doClose();
if (writeLock != null) {
writeLock.release(); // release write lock
writeLock = null;
}
if(closeDirectory)
directory.close();
}
/** Implements close. */

MultiReader.java

@@ -31,32 +31,44 @@ import org.apache.lucene.store.Directory;
* @version $Id$
*/
public class MultiReader extends IndexReader {
private IndexReader[] readers;
private IndexReader[] subReaders;
private int[] starts; // 1st docno for each segment
private Hashtable normsCache = new Hashtable();
private int maxDoc = 0;
private int numDocs = -1;
private boolean hasDeletions = false;
/** Construct reading the named set of readers. */
public MultiReader(IndexReader[] readers) throws IOException {
this(readers.length == 0 ? null : readers[0].directory(), readers);
/**
* <p>Construct a MultiReader aggregating the named set of (sub)readers.
* Directory locking for delete, undeleteAll, and setNorm operations is
* left to the subreaders.</p>
* <p>Note that all subreaders are closed if this MultiReader is closed.</p>
* @param subReaders set of (sub)readers
* @throws IOException
*/
public MultiReader(IndexReader[] subReaders) throws IOException {
super(subReaders.length == 0 ? null : subReaders[0].directory());
initialize(subReaders);
}
/** Construct reading the named set of readers. */
public MultiReader(Directory directory, IndexReader[] readers)
MultiReader(Directory directory, SegmentInfos sis, boolean closeDirectory, IndexReader[] subReaders)
throws IOException {
super(directory);
this.readers = readers;
starts = new int[readers.length + 1]; // build starts array
for (int i = 0; i < readers.length; i++) {
super(directory, sis, closeDirectory);
initialize(subReaders);
}
private void initialize(IndexReader[] subReaders) throws IOException{
this.subReaders = subReaders;
starts = new int[subReaders.length + 1]; // build starts array
for (int i = 0; i < subReaders.length; i++) {
starts[i] = maxDoc;
maxDoc += readers[i].maxDoc(); // compute maxDocs
maxDoc += subReaders[i].maxDoc(); // compute maxDocs
if (readers[i].hasDeletions())
if (subReaders[i].hasDeletions())
hasDeletions = true;
}
starts[readers.length] = maxDoc;
starts[subReaders.length] = maxDoc;
}
@@ -69,20 +81,20 @@ public class MultiReader extends IndexReader {
public TermFreqVector[] getTermFreqVectors(int n)
throws IOException {
int i = readerIndex(n); // find segment num
return readers[i].getTermFreqVectors(n - starts[i]); // dispatch to segment
return subReaders[i].getTermFreqVectors(n - starts[i]); // dispatch to segment
}
public TermFreqVector getTermFreqVector(int n, String field)
throws IOException {
int i = readerIndex(n); // find segment num
return readers[i].getTermFreqVector(n - starts[i], field);
return subReaders[i].getTermFreqVector(n - starts[i], field);
}
public synchronized int numDocs() {
if (numDocs == -1) { // check cache
int n = 0; // cache miss--recompute
for (int i = 0; i < readers.length; i++)
n += readers[i].numDocs(); // sum from readers
for (int i = 0; i < subReaders.length; i++)
n += subReaders[i].numDocs(); // sum from readers
numDocs = n;
}
return numDocs;
@@ -94,32 +106,32 @@
public Document document(int n) throws IOException {
int i = readerIndex(n); // find segment num
return readers[i].document(n - starts[i]); // dispatch to segment reader
return subReaders[i].document(n - starts[i]); // dispatch to segment reader
}
public boolean isDeleted(int n) {
int i = readerIndex(n); // find segment num
return readers[i].isDeleted(n - starts[i]); // dispatch to segment reader
return subReaders[i].isDeleted(n - starts[i]); // dispatch to segment reader
}
public boolean hasDeletions() { return hasDeletions; }
protected synchronized void doDelete(int n) throws IOException {
protected void doDelete(int n) throws IOException {
numDocs = -1; // invalidate cache
int i = readerIndex(n); // find segment num
readers[i].doDelete(n - starts[i]); // dispatch to segment reader
subReaders[i].delete(n - starts[i]); // dispatch to segment reader
hasDeletions = true;
}
public void undeleteAll() throws IOException {
for (int i = 0; i < readers.length; i++)
readers[i].undeleteAll();
protected void doUndeleteAll() throws IOException {
for (int i = 0; i < subReaders.length; i++)
subReaders[i].undeleteAll();
hasDeletions = false;
}
private int readerIndex(int n) { // find reader for doc n:
int lo = 0; // search starts array
int hi = readers.length - 1; // for first element less
int hi = subReaders.length - 1; // for first element less
while (hi >= lo) {
int mid = (lo + hi) >> 1;
@@ -129,7 +141,7 @@ public class MultiReader extends IndexReader {
else if (n > midValue)
lo = mid + 1;
else { // found a match
while (mid+1 < readers.length && starts[mid+1] == midValue) {
while (mid+1 < subReaders.length && starts[mid+1] == midValue) {
mid++; // scan to last match
}
return mid;
@@ -144,8 +156,8 @@
return bytes; // cache hit
bytes = new byte[maxDoc()];
for (int i = 0; i < readers.length; i++)
readers[i].norms(field, bytes, starts[i]);
for (int i = 0; i < subReaders.length; i++)
subReaders[i].norms(field, bytes, starts[i]);
normsCache.put(field, bytes); // update cache
return bytes;
}
@@ -156,43 +168,48 @@
if (bytes != null) // cache hit
System.arraycopy(bytes, 0, result, offset, maxDoc());
for (int i = 0; i < readers.length; i++) // read from segments
readers[i].norms(field, result, offset + starts[i]);
for (int i = 0; i < subReaders.length; i++) // read from segments
subReaders[i].norms(field, result, offset + starts[i]);
}
public synchronized void setNorm(int n, String field, byte value)
protected void doSetNorm(int n, String field, byte value)
throws IOException {
normsCache.remove(field); // clear cache
int i = readerIndex(n); // find segment num
readers[i].setNorm(n-starts[i], field, value); // dispatch
subReaders[i].setNorm(n-starts[i], field, value); // dispatch
}
public TermEnum terms() throws IOException {
return new MultiTermEnum(readers, starts, null);
return new MultiTermEnum(subReaders, starts, null);
}
public TermEnum terms(Term term) throws IOException {
return new MultiTermEnum(readers, starts, term);
return new MultiTermEnum(subReaders, starts, term);
}
public int docFreq(Term t) throws IOException {
int total = 0; // sum freqs in segments
for (int i = 0; i < readers.length; i++)
total += readers[i].docFreq(t);
for (int i = 0; i < subReaders.length; i++)
total += subReaders[i].docFreq(t);
return total;
}
public TermDocs termDocs() throws IOException {
return new MultiTermDocs(readers, starts);
return new MultiTermDocs(subReaders, starts);
}
public TermPositions termPositions() throws IOException {
return new MultiTermPositions(readers, starts);
return new MultiTermPositions(subReaders, starts);
}
protected void doCommit() throws IOException {
for (int i = 0; i < subReaders.length; i++)
subReaders[i].commit();
}
protected synchronized void doClose() throws IOException {
for (int i = 0; i < readers.length; i++)
readers[i].close();
for (int i = 0; i < subReaders.length; i++)
subReaders[i].close();
}
/**
@@ -201,8 +218,8 @@ public class MultiReader extends IndexReader {
public Collection getFieldNames() throws IOException {
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < readers.length; i++) {
IndexReader reader = readers[i];
for (int i = 0; i < subReaders.length; i++) {
IndexReader reader = subReaders[i];
Collection names = reader.getFieldNames();
// iterate through the field names and add them to the set
for (Iterator iterator = names.iterator(); iterator.hasNext();) {
@@ -219,8 +236,8 @@
public Collection getFieldNames(boolean indexed) throws IOException {
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < readers.length; i++) {
IndexReader reader = readers[i];
for (int i = 0; i < subReaders.length; i++) {
IndexReader reader = subReaders[i];
Collection names = reader.getFieldNames(indexed);
fieldSet.addAll(names);
}
@@ -230,8 +247,8 @@
public Collection getIndexedFieldNames(boolean storedTermVector) {
// maintain a unique set of field names
Set fieldSet = new HashSet();
for (int i = 0; i < readers.length; i++) {
IndexReader reader = readers[i];
for (int i = 0; i < subReaders.length; i++) {
IndexReader reader = subReaders[i];
Collection names = reader.getIndexedFieldNames(storedTermVector);
fieldSet.addAll(names);
}
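The public constructor above keeps the pre-existing behavior of deferring all directory locking to the sub-readers, while the new package-private constructor lets IndexReader.open() build a directory-owning MultiReader. A sketch of the public path, with index paths invented:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;

public class MultiReaderSketch {
  public static void main(String[] args) throws IOException {
    // Each sub-reader keeps responsibility for locking its own
    // directory, per the new javadoc above.
    IndexReader[] subReaders = {
      IndexReader.open("/tmp/index-a"),
      IndexReader.open("/tmp/index-b")
    };
    MultiReader reader = new MultiReader(subReaders);
    System.out.println("docs: " + reader.numDocs()); // summed over sub-readers
    reader.close(); // commits and closes every sub-reader
  }
}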

SegmentReader.java

@@ -37,7 +37,6 @@ import org.apache.lucene.util.BitVector;
* @version $Id$
*/
final class SegmentReader extends IndexReader {
private boolean closeDirectory = false;
private String segment;
FieldInfos fieldInfos;
@@ -49,6 +48,7 @@ final class SegmentReader extends IndexReader {
BitVector deletedDocs = null;
private boolean deletedDocsDirty = false;
private boolean normsDirty = false;
private boolean undeleteAll = false;
InputStream freqStream;
InputStream proxStream;
@@ -57,13 +57,18 @@
CompoundFileReader cfsReader;
private class Norm {
public Norm(InputStream in) { this.in = in; }
public Norm(InputStream in, int number)
{
this.in = in;
this.number = number;
}
private InputStream in;
private byte[] bytes;
private boolean dirty;
private int number;
private void reWrite(String name) throws IOException {
private void reWrite() throws IOException {
// NOTE: norms are re-written in regular directory, not cfs
OutputStream out = directory().createFile(segment + ".tmp");
try {
@@ -71,7 +76,7 @@
} finally {
out.close();
}
String fileName = segment + ".f" + fieldInfos.fieldNumber(name);
String fileName = segment + ".f" + number;
directory().renameFile(segment + ".tmp", fileName);
this.dirty = false;
}
@@ -81,14 +86,17 @@
SegmentReader(SegmentInfos sis, SegmentInfo si, boolean closeDir)
throws IOException {
this(si);
closeDirectory = closeDir;
segmentInfos = sis;
super(si.dir, sis, closeDir);
initialize(si);
}
SegmentReader(SegmentInfo si)
throws IOException {
SegmentReader(SegmentInfo si) throws IOException {
super(si.dir);
initialize(si);
}
private void initialize(SegmentInfo si) throws IOException
{
segment = si.name;
// Use compound file directory for some files, if it exists
@@ -119,42 +127,29 @@
}
}
protected final synchronized void doClose() throws IOException {
if (deletedDocsDirty || normsDirty) {
synchronized (directory()) { // in- & inter-process sync
new Lock.With(directory().makeLock(IndexWriter.COMMIT_LOCK_NAME),
IndexWriter.COMMIT_LOCK_TIMEOUT) {
public Object doBody() throws IOException {
if (deletedDocsDirty) { // re-write deleted
deletedDocs.write(directory(), segment + ".tmp");
directory().renameFile(segment + ".tmp", segment + ".del");
}
if (normsDirty) { // re-write norms
Enumeration keys = norms.keys();
Enumeration values = norms.elements();
while (values.hasMoreElements()) {
String field = (String) keys.nextElement();
Norm norm = (Norm) values.nextElement();
if (norm.dirty) {
norm.reWrite(field);
}
}
}
if (segmentInfos != null)
segmentInfos.write(directory());
else
directory().touchFile("segments");
return null;
}
}.run();
}
deletedDocsDirty = false;
normsDirty = false;
protected final void doCommit() throws IOException {
if (deletedDocsDirty) { // re-write deleted
deletedDocs.write(directory(), segment + ".tmp");
directory().renameFile(segment + ".tmp", segment + ".del");
}
if(undeleteAll && directory().fileExists(segment + ".del")){
directory().deleteFile(segment + ".del");
}
if (normsDirty) { // re-write norms
Enumeration values = norms.elements();
while (values.hasMoreElements()) {
Norm norm = (Norm) values.nextElement();
if (norm.dirty) {
norm.reWrite();
}
}
}
deletedDocsDirty = false;
normsDirty = false;
undeleteAll = false;
}
protected final void doClose() throws IOException {
fieldsReader.close();
tis.close();
@@ -168,9 +163,6 @@
if (cfsReader != null)
cfsReader.close();
if (closeDirectory)
directory().close();
}
static final boolean hasDeletions(SegmentInfo si) throws IOException {
@@ -186,30 +178,20 @@
return si.dir.fileExists(si.name + ".cfs");
}
protected final synchronized void doDelete(int docNum) throws IOException {
protected final void doDelete(int docNum) throws IOException {
if (deletedDocs == null)
deletedDocs = new BitVector(maxDoc());
deletedDocsDirty = true;
undeleteAll = false;
deletedDocs.set(docNum);
}
public synchronized void undeleteAll() throws IOException {
synchronized (directory()) { // in- & inter-process sync
new Lock.With(directory().makeLock(IndexWriter.COMMIT_LOCK_NAME),
IndexWriter.COMMIT_LOCK_TIMEOUT) {
public Object doBody() throws IOException {
if (directory().fileExists(segment + ".del")) {
directory().deleteFile(segment + ".del");
}
return null;
}
};
protected final void doUndeleteAll() throws IOException {
deletedDocs = null;
deletedDocsDirty = false;
}
undeleteAll = true;
}
final Vector files() throws IOException {
Vector files = new Vector(16);
final String ext[] = new String[]{
@@ -334,7 +316,7 @@
return norm.bytes;
}
public synchronized void setNorm(int doc, String field, byte value)
protected final void doSetNorm(int doc, String field, byte value)
throws IOException {
Norm norm = (Norm) norms.get(field);
if (norm == null) // not an indexed field
@@ -374,7 +356,7 @@
String fileName = segment + ".f" + fi.number;
// look first for re-written file, then in compound format
Directory d = directory().fileExists(fileName) ? directory() : cfsDir;
norms.put(fi.name, new Norm(d.openFile(fileName)));
norms.put(fi.name, new Norm(d.openFile(fileName), fi.number));
}
}
}
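Note how doUndeleteAll() no longer touches the directory at all: it just drops the in-memory BitVector and raises the undeleteAll flag, and the segment's ".del" file is only removed later inside doCommit(), under the commit lock held by IndexReader.commit(). From the caller's side that looks like the following sketch (index path invented):

import java.io.IOException;
import org.apache.lucene.index.IndexReader;

public class UndeleteSketch {
  public static void main(String[] args) throws IOException {
    IndexReader reader = IndexReader.open("/tmp/index");
    reader.undeleteAll(); // in-memory only; nothing is written yet
    reader.close();       // commit() removes the segment ".del" file if present
  }
}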

TestFilterIndexReader.java

@@ -81,12 +81,12 @@ public class TestFilterIndexReader extends TestCase {
/** Filter terms with TestTermEnum. */
public TermEnum terms() throws IOException {
return new TestTermEnum(in.terms());
return new TestTermEnum(baseReader.terms());
}
/** Filter positions with TestTermPositions. */
public TermPositions termPositions() throws IOException {
return new TestTermPositions(in.termPositions());
return new TestTermPositions(baseReader.termPositions());
}
}

TestIndexReader.java

@@ -173,8 +173,15 @@ public class TestIndexReader extends TestCase
}
public void testDeleteReaderWriterConflictUnoptimized() throws IOException{
deleteReaderWriterConflict(false);
}
public void testDeleteReaderWriterConflictOptimized() throws IOException{
deleteReaderWriterConflict(true);
}
public void testDeleteReaderWriterConflict() throws IOException
private void deleteReaderWriterConflict(boolean optimize) throws IOException
{
//Directory dir = new RAMDirectory();
Directory dir = getDirectory(true);
@@ -210,7 +217,8 @@
// searchers. Because of this, deletions made via a previously open
// reader, which would be applied to that reader's segment, are lost
// for subsequent searchers/readers
writer.optimize();
if(optimize)
writer.optimize();
writer.close();
// The reader should not see the new data
@@ -288,8 +296,15 @@
dir = getDirectory(true);
}
public void testDeleteReaderReaderConflict() throws IOException
public void testDeleteReaderReaderConflictUnoptimized() throws IOException{
deleteReaderReaderConflict(false);
}
public void testDeleteReaderReaderConflictOptimized() throws IOException{
deleteReaderReaderConflict(true);
}
private void deleteReaderReaderConflict(boolean optimize) throws IOException
{
Directory dir = getDirectory(true);
@@ -307,7 +322,8 @@
addDoc(writer, searchTerm2.text());
addDoc(writer, searchTerm3.text());
}
writer.optimize();
if(optimize)
writer.optimize();
writer.close();
// OPEN TWO READERS

TestMultiReader.java

@@ -70,7 +70,7 @@ public class TestMultiReader extends TestCase {
public void testDocument() {
try {
sis.read(dir);
MultiReader reader = new MultiReader(dir, readers);
MultiReader reader = new MultiReader(dir, sis, false, readers);
assertTrue(reader != null);
Document newDoc1 = reader.document(0);
assertTrue(newDoc1 != null);
@@ -88,7 +88,7 @@
public void testTermVectors() {
try {
MultiReader reader = new MultiReader(dir, readers);
MultiReader reader = new MultiReader(dir, sis, false, readers);
assertTrue(reader != null);
} catch (IOException e) {
e.printStackTrace();