mirror of https://github.com/apache/lucene.git
LUCENE-818: throw AlreadyClosedException when accessing IndexWriter, *Reader, RAMDirectory and FieldsReader after close()
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@518262 13f79535-47bb-0310-9956-ffa450edef68
parent 4648912089
commit 9da8211775
CHANGES.txt:

@@ -29,6 +29,11 @@ API Changes
     share an index over NFS by customizing when prior commits are
     deleted.  (Mike McCandless)
 
+ 4. LUCENE-818: changed most public methods of IndexWriter,
+    IndexReader (and its subclasses), FieldsReader and RAMDirectory to
+    throw AlreadyClosedException if they are accessed after being
+    closed.  (Mike McCandless)
+
 Bug fixes
 
  1. LUCENE-804: Fixed build.xml to pack a fully compilable src dist. (Doron Cohen)
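In practical terms, the new contract means that once close() has been called, further calls on the same instance fail fast with AlreadyClosedException instead of behaving unpredictably. Below is a minimal sketch of that caller-visible behavior, assuming the in-memory RAMDirectory and the 2.1-era IndexWriter constructor; the class name and printed message are illustrative only.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.RAMDirectory;

public class AlreadyClosedDemo {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(), true);
    writer.addDocument(new Document());
    writer.close();

    try {
      // Guarded methods now start with ensureOpen(), so this fails fast.
      writer.addDocument(new Document());
    } catch (AlreadyClosedException e) {
      System.out.println("writer already closed: " + e.getMessage());
    }
  }
}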
FieldsReader.java:

@@ -20,6 +20,7 @@ package org.apache.lucene.index;
 import org.apache.lucene.document.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.AlreadyClosedException;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -46,6 +47,7 @@ final class FieldsReader {
 
   private final IndexInput indexStream;
   private int size;
+  private boolean closed;
 
   private ThreadLocal fieldsStreamTL = new ThreadLocal();
 
@@ -58,6 +60,15 @@ final class FieldsReader {
     size = (int) (indexStream.length() / 8);
   }
 
+  /**
+   * @throws AlreadyClosedException if this FieldsReader is closed
+   */
+  protected final void ensureOpen() throws AlreadyClosedException {
+    if (closed) {
+      throw new AlreadyClosedException("this FieldsReader is closed");
+    }
+  }
+
   /**
    * Closes the underlying {@link org.apache.lucene.store.IndexInput} streams, including any ones associated with a
    * lazy implementation of a Field.  This means that the Fields values will not be accessible.
@@ -65,13 +76,16 @@ final class FieldsReader {
    * @throws IOException
    */
   final void close() throws IOException {
-    fieldsStream.close();
-    cloneableFieldsStream.close();
-    indexStream.close();
-    IndexInput localFieldsStream = (IndexInput) fieldsStreamTL.get();
-    if (localFieldsStream != null) {
-      localFieldsStream.close();
-      fieldsStreamTL.set(null);
+    if (!closed) {
+      fieldsStream.close();
+      cloneableFieldsStream.close();
+      indexStream.close();
+      IndexInput localFieldsStream = (IndexInput) fieldsStreamTL.get();
+      if (localFieldsStream != null) {
+        localFieldsStream.close();
+        fieldsStreamTL.set(null);
+      }
+      closed = true;
     }
   }
 
@@ -323,6 +337,7 @@ final class FieldsReader {
      * binaryValue() must be set.
      */
     public byte[] binaryValue() {
+      ensureOpen();
       if (fieldsData == null) {
         final byte[] b = new byte[toRead];
         IndexInput localFieldsStream = getFieldStream();
@@ -349,6 +364,7 @@ final class FieldsReader {
      * and binaryValue() must be set.
      */
     public Reader readerValue() {
+      ensureOpen();
       return fieldsData instanceof Reader ? (Reader) fieldsData : null;
     }
 
@@ -358,6 +374,7 @@ final class FieldsReader {
      * binaryValue() must be set.
      */
     public String stringValue() {
+      ensureOpen();
       if (fieldsData == null) {
         IndexInput localFieldsStream = getFieldStream();
         try {
@@ -380,18 +397,22 @@ final class FieldsReader {
     }
 
     public long getPointer() {
+      ensureOpen();
       return pointer;
     }
 
     public void setPointer(long pointer) {
+      ensureOpen();
       this.pointer = pointer;
     }
 
     public int getToRead() {
+      ensureOpen();
       return toRead;
     }
 
     public void setToRead(int toRead) {
+      ensureOpen();
       this.toRead = toRead;
     }
   }
FilterIndexReader.java:

@@ -92,43 +92,84 @@ public class FilterIndexReader extends IndexReader {
 
   public TermFreqVector[] getTermFreqVectors(int docNumber)
           throws IOException {
+    ensureOpen();
     return in.getTermFreqVectors(docNumber);
   }
 
   public TermFreqVector getTermFreqVector(int docNumber, String field)
           throws IOException {
+    ensureOpen();
     return in.getTermFreqVector(docNumber, field);
   }
 
-  public int numDocs() { return in.numDocs(); }
-  public int maxDoc() { return in.maxDoc(); }
+  public int numDocs() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.numDocs();
+  }
 
-  public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException { return in.document(n, fieldSelector); }
+  public int maxDoc() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.maxDoc();
+  }
+
+  public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    ensureOpen();
+    return in.document(n, fieldSelector);
+  }
+
+  public boolean isDeleted(int n) {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.isDeleted(n);
+  }
+
+  public boolean hasDeletions() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return in.hasDeletions();
+  }
 
-  public boolean isDeleted(int n) { return in.isDeleted(n); }
-  public boolean hasDeletions() { return in.hasDeletions(); }
   protected void doUndeleteAll() throws CorruptIndexException, IOException {in.undeleteAll();}
 
   public boolean hasNorms(String field) throws IOException {
+    ensureOpen();
     return in.hasNorms(field);
   }
 
-  public byte[] norms(String f) throws IOException { return in.norms(f); }
+  public byte[] norms(String f) throws IOException {
+    ensureOpen();
+    return in.norms(f);
+  }
 
   public void norms(String f, byte[] bytes, int offset) throws IOException {
+    ensureOpen();
     in.norms(f, bytes, offset);
   }
 
   protected void doSetNorm(int d, String f, byte b) throws CorruptIndexException, IOException {
     in.setNorm(d, f, b);
   }
 
-  public TermEnum terms() throws IOException { return in.terms(); }
-  public TermEnum terms(Term t) throws IOException { return in.terms(t); }
+  public TermEnum terms() throws IOException {
+    ensureOpen();
+    return in.terms();
+  }
 
-  public int docFreq(Term t) throws IOException { return in.docFreq(t); }
+  public TermEnum terms(Term t) throws IOException {
+    ensureOpen();
+    return in.terms(t);
+  }
 
-  public TermDocs termDocs() throws IOException { return in.termDocs(); }
+  public int docFreq(Term t) throws IOException {
+    ensureOpen();
+    return in.docFreq(t);
+  }
+
+  public TermDocs termDocs() throws IOException {
+    ensureOpen();
+    return in.termDocs();
+  }
 
   public TermPositions termPositions() throws IOException {
+    ensureOpen();
     return in.termPositions();
   }
 
@@ -138,9 +179,17 @@ public class FilterIndexReader extends IndexReader {
 
 
   public Collection getFieldNames(IndexReader.FieldOption fieldNames) {
+    ensureOpen();
     return in.getFieldNames(fieldNames);
   }
 
-  public long getVersion() { return in.getVersion(); }
-  public boolean isCurrent() throws CorruptIndexException, IOException { return in.isCurrent(); }
+  public long getVersion() {
+    ensureOpen();
+    return in.getVersion();
+  }
+
+  public boolean isCurrent() throws CorruptIndexException, IOException {
+    ensureOpen();
+    return in.isCurrent();
+  }
 }
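The wrapper readers above also show the one deliberate exception to the rule: cheap per-document accessors such as numDocs(), maxDoc(), isDeleted(n) and hasDeletions() intentionally skip ensureOpen() ("it could affect performance" in the diff's own words), while everything else fails fast after close(). Below is a minimal reader-side sketch of the guarded behavior, assuming an in-memory RAMDirectory; field names and the analyzer choice are illustrative only.

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.RAMDirectory;

public class ReaderAfterCloseDemo {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
    Document doc = new Document();
    doc.add(new Field("id", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
    writer.addDocument(doc);
    writer.close();

    IndexReader reader = IndexReader.open(dir);
    int max = reader.maxDoc();   // read while the reader is still open
    reader.close();

    try {
      reader.document(0);        // guarded: IndexReader.document() now calls ensureOpen()
    } catch (AlreadyClosedException e) {
      System.out.println("reader already closed; index had " + max + " doc(s)");
    }
  }
}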
IndexReader.java:

@@ -25,6 +25,7 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.AlreadyClosedException;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -115,7 +116,16 @@ public abstract class IndexReader {
   private boolean directoryOwner;
   private boolean closeDirectory;
   private IndexDeletionPolicy deletionPolicy;
-  private boolean isClosed;
+  private boolean closed;
+
+  /**
+   * @throws AlreadyClosedException if this IndexReader is closed
+   */
+  protected final void ensureOpen() throws AlreadyClosedException {
+    if (closed) {
+      throw new AlreadyClosedException("this IndexReader is closed");
+    }
+  }
 
   private SegmentInfos segmentInfos;
   private Lock writeLock;
@@ -208,8 +218,12 @@ public abstract class IndexReader {
     }.run();
   }
 
-  /** Returns the directory this index resides in. */
-  public Directory directory() { return directory; }
+  /** Returns the directory this index resides in.
+   */
+  public Directory directory() {
+    ensureOpen();
+    return directory;
+  }
 
   /**
    * Returns the time the index in the named directory was last modified.
@@ -301,6 +315,7 @@ public abstract class IndexReader {
    * Version number when this IndexReader was opened.
    */
   public long getVersion() {
+    ensureOpen();
     return segmentInfos.getVersion();
   }
 
@@ -313,6 +328,7 @@ public abstract class IndexReader {
    * @throws IOException if there is a low-level IO error
   */
   public boolean isCurrent() throws CorruptIndexException, IOException {
+    ensureOpen();
     return SegmentInfos.readCurrentVersion(directory) == segmentInfos.getVersion();
   }
 
@@ -321,7 +337,8 @@ public abstract class IndexReader {
    * @return <code>true</code> if the index is optimized; <code>false</code> otherwise
   */
   public boolean isOptimized() {
-    return segmentInfos.size() == 1 && hasDeletions() == false;
+    ensureOpen();
+    return segmentInfos.size() == 1 && hasDeletions() == false;
   }
 
   /**
@@ -407,6 +424,7 @@ public abstract class IndexReader {
    * @throws IOException if there is a low-level IO error
   */
   public Document document(int n) throws CorruptIndexException, IOException {
+    ensureOpen();
     return document(n, null);
   }
 
@@ -445,6 +463,7 @@ public abstract class IndexReader {
   public boolean hasNorms(String field) throws IOException {
     // backward compatible implementation.
    // SegmentReader has an efficient implementation.
+    ensureOpen();
     return norms(field) != null;
   }
 
@@ -477,11 +496,11 @@ public abstract class IndexReader {
    * @throws LockObtainFailedException if another writer
    *  has this index open (<code>write.lock</code> could not
    *  be obtained)
-   * @throws IOException if this reader was closed already
-   *  or there is a low-level IO error
+   * @throws IOException if there is a low-level IO error
   */
   public final synchronized void setNorm(int doc, String field, byte value)
           throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     if(directoryOwner)
       acquireWriteLock();
     hasChanges = true;
@@ -504,27 +523,31 @@ public abstract class IndexReader {
    * @throws LockObtainFailedException if another writer
    *  has this index open (<code>write.lock</code> could not
    *  be obtained)
-   * @throws IOException if this reader was closed already
-   *  or there is a low-level IO error
+   * @throws IOException if there is a low-level IO error
   */
   public void setNorm(int doc, String field, float value)
           throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     setNorm(doc, field, Similarity.encodeNorm(value));
   }
 
   /** Returns an enumeration of all the terms in the index.
    * The enumeration is ordered by Term.compareTo().  Each term
    * is greater than all that precede it in the enumeration.
+   * @throws IOException if there is a low-level IO error
   */
   public abstract TermEnum terms() throws IOException;
 
   /** Returns an enumeration of all terms after a given term.
    * The enumeration is ordered by Term.compareTo().  Each term
    * is greater than all that precede it in the enumeration.
+   * @throws IOException if there is a low-level IO error
   */
   public abstract TermEnum terms(Term t) throws IOException;
 
-  /** Returns the number of documents containing the term <code>t</code>. */
+  /** Returns the number of documents containing the term <code>t</code>.
+   * @throws IOException if there is a low-level IO error
+   */
   public abstract int docFreq(Term t) throws IOException;
 
   /** Returns an enumeration of all the documents which contain
@@ -536,14 +559,18 @@ public abstract class IndexReader {
    * </ul>
    * <p>The enumeration is ordered by document number.  Each document number
    * is greater than all that precede it in the enumeration.
+   * @throws IOException if there is a low-level IO error
   */
   public TermDocs termDocs(Term term) throws IOException {
+    ensureOpen();
     TermDocs termDocs = termDocs();
     termDocs.seek(term);
     return termDocs;
   }
 
-  /** Returns an unpositioned {@link TermDocs} enumerator. */
+  /** Returns an unpositioned {@link TermDocs} enumerator.
+   * @throws IOException if there is a low-level IO error
+   */
   public abstract TermDocs termDocs() throws IOException;
 
   /** Returns an enumeration of all the documents which contain
@@ -561,14 +588,18 @@ public abstract class IndexReader {
    * <p> This positional information faciliates phrase and proximity searching.
    * <p>The enumeration is ordered by document number.  Each document number is
    * greater than all that precede it in the enumeration.
+   * @throws IOException if there is a low-level IO error
   */
   public TermPositions termPositions(Term term) throws IOException {
+    ensureOpen();
     TermPositions termPositions = termPositions();
     termPositions.seek(term);
     return termPositions;
   }
 
-  /** Returns an unpositioned {@link TermPositions} enumerator. */
+  /** Returns an unpositioned {@link TermPositions} enumerator.
+   * @throws IOException if there is a low-level IO error
+   */
   public abstract TermPositions termPositions() throws IOException;
 
   /**
@@ -584,10 +615,9 @@ public abstract class IndexReader {
    * @throws IOException if there is a low-level IO error
   */
   private void acquireWriteLock() throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     if (stale)
       throw new StaleReaderException("IndexReader out of date and no longer valid for delete, undelete, or setNorm operations");
-    if (isClosed)
-      throw new IOException("this reader is closed");
 
     if (writeLock == null) {
       Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
@@ -620,10 +650,10 @@ public abstract class IndexReader {
    * @throws LockObtainFailedException if another writer
    *  has this index open (<code>write.lock</code> could not
    *  be obtained)
-   * @throws IOException if this reader was closed already
-   *  or there is a low-level IO error
+   * @throws IOException if there is a low-level IO error
   */
   public final synchronized void deleteDocument(int docNum) throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     if(directoryOwner)
       acquireWriteLock();
     hasChanges = true;
@@ -652,10 +682,10 @@ public abstract class IndexReader {
    * @throws LockObtainFailedException if another writer
    *  has this index open (<code>write.lock</code> could not
    *  be obtained)
-   * @throws IOException if this reader was closed already
-   *  or there is a low-level IO error
+   * @throws IOException if there is a low-level IO error
   */
   public final int deleteDocuments(Term term) throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     TermDocs docs = termDocs(term);
     if (docs == null) return 0;
     int n = 0;
@@ -678,10 +708,10 @@ public abstract class IndexReader {
    *  has this index open (<code>write.lock</code> could not
    *  be obtained)
    * @throws CorruptIndexException if the index is corrupt
-   * @throws IOException if this reader was closed already
-   *  or there is a low-level IO error
+   * @throws IOException if there is a low-level IO error
   */
   public final synchronized void undeleteAll() throws StaleReaderException, CorruptIndexException, LockObtainFailedException, IOException {
+    ensureOpen();
     if(directoryOwner)
       acquireWriteLock();
     hasChanges = true;
@@ -790,19 +820,16 @@ public abstract class IndexReader {
   * Closes files associated with this index.
   * Also saves any new deletions to disk.
   * No other methods should be called after this has been called.
-  * @throws IOException if this reader was closed already
-  *  or there is a low-level IO error
+  * @throws IOException if there is a low-level IO error
   */
  public final synchronized void close() throws IOException {
-    if (directoryOwner && isClosed) {
-      throw new IOException("this reader is already closed");
-    }
-    commit();
-    doClose();
+    if (!closed) {
+      commit();
+      doClose();
+      if (directoryOwner)
+        closed = true;
      if(closeDirectory)
        directory.close();
-    if (directoryOwner) {
-      isClosed = true;
    }
  }
 
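Note the change in close() semantics in the last hunk: the old code threw IOException("this reader is already closed") on a second close(), whereas the new body is wrapped in if (!closed), so closing an already-closed reader (and, in the writer hunks below, an already-closed writer) becomes a no-op. A minimal sketch under the same in-memory assumptions as the earlier examples; the quoted message comes from the removed line.

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;

public class DoubleCloseDemo {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true);
    writer.addDocument(new Document());
    writer.close();
    writer.close();   // second close: the body is guarded by if (!closed), so nothing happens

    IndexReader reader = IndexReader.open(dir);
    reader.close();
    reader.close();   // previously threw IOException("this reader is already closed"); now a no-op
  }
}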
IndexWriter.java:

@@ -24,6 +24,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.Lock;
 import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.RAMDirectory;
 
 import java.io.File;
@@ -208,6 +209,16 @@ public class IndexWriter {
   private boolean useCompoundFile = true;
 
   private boolean closeDir;
+  private boolean closed;
+
+  /**
+   * @throws AlreadyClosedException if this IndexWriter is closed
+   */
+  protected final void ensureOpen() throws AlreadyClosedException {
+    if (closed) {
+      throw new AlreadyClosedException("this IndexWriter is closed");
+    }
+  }
 
   /** Get the current setting of whether to use the compound file format.
    *  Note that this just returns the value you set with setUseCompoundFile(boolean)
@@ -215,6 +226,7 @@ public class IndexWriter {
    * @see #setUseCompoundFile(boolean)
   */
   public boolean getUseCompoundFile() {
+    ensureOpen();
     return useCompoundFile;
   }
 
@@ -223,6 +235,7 @@ public class IndexWriter {
    * is finished. This is done regardless of what directory is in use.
   */
   public void setUseCompoundFile(boolean value) {
+    ensureOpen();
     useCompoundFile = value;
   }
 
@@ -231,6 +244,7 @@ public class IndexWriter {
    * @see Similarity#setDefault(Similarity)
   */
   public void setSimilarity(Similarity similarity) {
+    ensureOpen();
     this.similarity = similarity;
   }
 
@@ -239,6 +253,7 @@ public class IndexWriter {
    * <p>This defaults to the current value of {@link Similarity#getDefault()}.
   */
   public Similarity getSimilarity() {
+    ensureOpen();
     return this.similarity;
   }
 
@@ -264,6 +279,7 @@ public class IndexWriter {
    * @see #DEFAULT_TERM_INDEX_INTERVAL
   */
   public void setTermIndexInterval(int interval) {
+    ensureOpen();
     this.termIndexInterval = interval;
   }
 
@@ -271,7 +287,10 @@ public class IndexWriter {
    *
    * @see #setTermIndexInterval(int)
   */
-  public int getTermIndexInterval() { return termIndexInterval; }
+  public int getTermIndexInterval() {
+    ensureOpen();
+    return termIndexInterval;
+  }
 
   /**
    * Constructs an IndexWriter for the index in <code>path</code>.
@@ -580,6 +599,7 @@ public class IndexWriter {
    * <p>The default value is {@link Integer#MAX_VALUE}.
   */
   public void setMaxMergeDocs(int maxMergeDocs) {
+    ensureOpen();
     this.maxMergeDocs = maxMergeDocs;
   }
 
@@ -587,6 +607,7 @@ public class IndexWriter {
    * @see #setMaxMergeDocs
   */
   public int getMaxMergeDocs() {
+    ensureOpen();
     return maxMergeDocs;
   }
 
@@ -603,6 +624,7 @@ public class IndexWriter {
    * By default, no more than 10,000 terms will be indexed for a field.
   */
   public void setMaxFieldLength(int maxFieldLength) {
+    ensureOpen();
     this.maxFieldLength = maxFieldLength;
   }
 
@@ -610,6 +632,7 @@ public class IndexWriter {
    * @see #setMaxFieldLength
   */
   public int getMaxFieldLength() {
+    ensureOpen();
     return maxFieldLength;
   }
 
@@ -624,6 +647,7 @@ public class IndexWriter {
    * @throws IllegalArgumentException if maxBufferedDocs is smaller than 2
   */
   public void setMaxBufferedDocs(int maxBufferedDocs) {
+    ensureOpen();
     if (maxBufferedDocs < 2)
       throw new IllegalArgumentException("maxBufferedDocs must at least be 2");
     this.minMergeDocs = maxBufferedDocs;
@@ -633,6 +657,7 @@ public class IndexWriter {
    * @see #setMaxBufferedDocs
   */
   public int getMaxBufferedDocs() {
+    ensureOpen();
     return minMergeDocs;
   }
 
@@ -646,6 +671,7 @@ public class IndexWriter {
    * @throws IllegalArgumentException if maxBufferedDeleteTerms is smaller than 1</p>
   */
   public void setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
+    ensureOpen();
     if (maxBufferedDeleteTerms < 1)
       throw new IllegalArgumentException("maxBufferedDeleteTerms must at least be 1");
     this.maxBufferedDeleteTerms = maxBufferedDeleteTerms;
@@ -655,6 +681,7 @@ public class IndexWriter {
    * @see #setMaxBufferedDeleteTerms
   */
   public int getMaxBufferedDeleteTerms() {
+    ensureOpen();
     return maxBufferedDeleteTerms;
   }
 
@@ -669,6 +696,7 @@ public class IndexWriter {
    * <p>This must never be less than 2.  The default value is 10.
   */
   public void setMergeFactor(int mergeFactor) {
+    ensureOpen();
     if (mergeFactor < 2)
       throw new IllegalArgumentException("mergeFactor cannot be less than 2");
     this.mergeFactor = mergeFactor;
@@ -678,6 +706,7 @@ public class IndexWriter {
    * @see #setMergeFactor
   */
   public int getMergeFactor() {
+    ensureOpen();
     return mergeFactor;
   }
 
@@ -701,6 +730,7 @@ public class IndexWriter {
    * to this.
   */
   public void setInfoStream(PrintStream infoStream) {
+    ensureOpen();
     this.infoStream = infoStream;
     deleter.setInfoStream(infoStream);
   }
@@ -709,6 +739,7 @@ public class IndexWriter {
    * @see #setInfoStream
   */
   public PrintStream getInfoStream() {
+    ensureOpen();
     return infoStream;
   }
 
@@ -717,6 +748,7 @@ public class IndexWriter {
    * @see #setDefaultWriteLockTimeout to change the default value for all instances of IndexWriter.
   */
   public void setWriteLockTimeout(long writeLockTimeout) {
+    ensureOpen();
     this.writeLockTimeout = writeLockTimeout;
   }
 
@@ -724,6 +756,7 @@ public class IndexWriter {
    * @see #setWriteLockTimeout
   */
   public long getWriteLockTimeout() {
+    ensureOpen();
     return writeLockTimeout;
   }
 
@@ -777,22 +810,26 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public synchronized void close() throws CorruptIndexException, IOException {
-    flushRamSegments();
+    if (!closed) {
+      flushRamSegments();
 
       if (commitPending) {
         segmentInfos.write(directory);         // now commit changes
         deleter.checkpoint(segmentInfos, true);
         commitPending = false;
         rollbackSegmentInfos = null;
       }
 
       ramDirectory.close();
       if (writeLock != null) {
         writeLock.release();                   // release write lock
         writeLock = null;
+      }
+      closed = true;
+
+      if(closeDir)
+        directory.close();
     }
-    if(closeDir)
-      directory.close();
   }
 
   /** Release the write lock, if needed. */
@@ -809,17 +846,20 @@ public class IndexWriter {
 
   /** Returns the Directory used by this index. */
   public Directory getDirectory() {
-    return directory;
+    ensureOpen();
+    return directory;
   }
 
   /** Returns the analyzer used by this index. */
   public Analyzer getAnalyzer() {
-    return analyzer;
+    ensureOpen();
+    return analyzer;
   }
 
 
   /** Returns the number of documents currently in this index. */
   public synchronized int docCount() {
+    ensureOpen();
     int count = ramSegmentInfos.size();
     for (int i = 0; i < segmentInfos.size(); i++) {
       SegmentInfo si = segmentInfos.info(i);
@@ -897,6 +937,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public void addDocument(Document doc, Analyzer analyzer) throws CorruptIndexException, IOException {
+    ensureOpen();
     SegmentInfo newSegmentInfo = buildSingleDocSegment(doc, analyzer);
     synchronized (this) {
       ramSegmentInfos.addElement(newSegmentInfo);
@@ -922,6 +963,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public synchronized void deleteDocuments(Term term) throws CorruptIndexException, IOException {
+    ensureOpen();
     bufferDeleteTerm(term);
     maybeFlushRamSegments();
   }
@@ -935,6 +977,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public synchronized void deleteDocuments(Term[] terms) throws CorruptIndexException, IOException {
+    ensureOpen();
     for (int i = 0; i < terms.length; i++) {
       bufferDeleteTerm(terms[i]);
     }
@@ -954,6 +997,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public void updateDocument(Term term, Document doc) throws CorruptIndexException, IOException {
+    ensureOpen();
     updateDocument(term, doc, getAnalyzer());
   }
 
@@ -972,6 +1016,7 @@ public class IndexWriter {
   */
   public void updateDocument(Term term, Document doc, Analyzer analyzer)
       throws CorruptIndexException, IOException {
+    ensureOpen();
     SegmentInfo newSegmentInfo = buildSingleDocSegment(doc, analyzer);
     synchronized (this) {
       bufferDeleteTerm(term);
@@ -1107,6 +1152,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public synchronized void optimize() throws CorruptIndexException, IOException {
+    ensureOpen();
     flushRamSegments();
     while (segmentInfos.size() > 1 ||
            (segmentInfos.size() == 1 &&
@@ -1200,6 +1246,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public void abort() throws IOException {
+    ensureOpen();
     if (!autoCommit) {
 
       // Keep the same segmentInfos instance but replace all
@@ -1290,6 +1337,7 @@ public class IndexWriter {
   public synchronized void addIndexes(Directory[] dirs)
     throws CorruptIndexException, IOException {
 
+    ensureOpen();
     optimize();          // start with zero or 1 seg
 
     int start = segmentInfos.size();
@@ -1375,6 +1423,7 @@ public class IndexWriter {
 
     // 1 flush ram segments
 
+    ensureOpen();
     flushRamSegments();
 
     // 2 copy segment infos and find the highest level from dirs
@@ -1479,6 +1528,7 @@ public class IndexWriter {
   public synchronized void addIndexes(IndexReader[] readers)
     throws CorruptIndexException, IOException {
 
+    ensureOpen();
     optimize();          // start with zero or 1 seg
 
     final String mergedName = newSegmentName();
@@ -1610,6 +1660,7 @@ public class IndexWriter {
    * @throws IOException if there is a low-level IO error
   */
   public final synchronized void flush() throws CorruptIndexException, IOException {
+    ensureOpen();
     flushRamSegments();
   }
 
@@ -1617,6 +1668,7 @@ public class IndexWriter {
    * Useful for size management with flushRamDocs()
   */
   public final long ramSizeInBytes() {
+    ensureOpen();
     return ramDirectory.sizeInBytes();
   }
 
@@ -1624,6 +1676,7 @@ public class IndexWriter {
    * Useful when calling flushRamSegments()
   */
   public final synchronized int numRamDocs() {
+    ensureOpen();
     return ramSegmentInfos.size();
   }
 
MultiReader.java:

@@ -72,24 +72,21 @@ public class MultiReader extends IndexReader {
   }
 
 
-  /** Return an array of term frequency vectors for the specified document.
-   *  The array contains a vector for each vectorized field in the document.
-   *  Each vector vector contains term numbers and frequencies for all terms
-   *  in a given vectorized field.
-   *  If no such fields existed, the method returns null.
-   */
   public TermFreqVector[] getTermFreqVectors(int n) throws IOException {
+    ensureOpen();
     int i = readerIndex(n);        // find segment num
     return subReaders[i].getTermFreqVectors(n - starts[i]); // dispatch to segment
   }
 
   public TermFreqVector getTermFreqVector(int n, String field)
       throws IOException {
+    ensureOpen();
     int i = readerIndex(n);        // find segment num
     return subReaders[i].getTermFreqVector(n - starts[i], field);
   }
 
   public synchronized int numDocs() {
+    // Don't call ensureOpen() here (it could affect performance)
     if (numDocs == -1) {        // check cache
       int n = 0;                // cache miss--recompute
       for (int i = 0; i < subReaders.length; i++)
@@ -100,21 +97,27 @@ public class MultiReader extends IndexReader {
   }
 
   public int maxDoc() {
+    // Don't call ensureOpen() here (it could affect performance)
     return maxDoc;
   }
 
   // inherit javadoc
   public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    ensureOpen();
     int i = readerIndex(n);                          // find segment num
     return subReaders[i].document(n - starts[i], fieldSelector);    // dispatch to segment reader
   }
 
   public boolean isDeleted(int n) {
+    // Don't call ensureOpen() here (it could affect performance)
     int i = readerIndex(n);                           // find segment num
     return subReaders[i].isDeleted(n - starts[i]);    // dispatch to segment reader
   }
 
-  public boolean hasDeletions() { return hasDeletions; }
+  public boolean hasDeletions() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return hasDeletions;
+  }
 
   protected void doDelete(int n) throws CorruptIndexException, IOException {
     numDocs = -1;                             // invalidate cache
@@ -153,6 +156,7 @@ public class MultiReader extends IndexReader {
   }
 
   public boolean hasNorms(String field) throws IOException {
+    ensureOpen();
     for (int i = 0; i < subReaders.length; i++) {
       if (subReaders[i].hasNorms(field)) return true;
     }
@@ -166,6 +170,7 @@ public class MultiReader extends IndexReader {
   }
 
   public synchronized byte[] norms(String field) throws IOException {
+    ensureOpen();
     byte[] bytes = (byte[])normsCache.get(field);
     if (bytes != null)
       return bytes;      // cache hit
@@ -181,6 +186,7 @@ public class MultiReader extends IndexReader {
 
   public synchronized void norms(String field, byte[] result, int offset)
     throws IOException {
+    ensureOpen();
     byte[] bytes = (byte[])normsCache.get(field);
     if (bytes==null && !hasNorms(field)) bytes=fakeNorms();
     if (bytes != null)      // cache hit
@@ -198,14 +204,17 @@ public class MultiReader extends IndexReader {
   }
 
   public TermEnum terms() throws IOException {
+    ensureOpen();
     return new MultiTermEnum(subReaders, starts, null);
   }
 
   public TermEnum terms(Term term) throws IOException {
+    ensureOpen();
     return new MultiTermEnum(subReaders, starts, term);
   }
 
   public int docFreq(Term t) throws IOException {
+    ensureOpen();
     int total = 0;          // sum freqs in segments
     for (int i = 0; i < subReaders.length; i++)
       total += subReaders[i].docFreq(t);
@@ -213,10 +222,12 @@ public class MultiReader extends IndexReader {
   }
 
   public TermDocs termDocs() throws IOException {
+    ensureOpen();
     return new MultiTermDocs(subReaders, starts);
   }
 
   public TermPositions termPositions() throws IOException {
+    ensureOpen();
     return new MultiTermPositions(subReaders, starts);
   }
 
@@ -244,11 +255,9 @@ public class MultiReader extends IndexReader {
       subReaders[i].close();
   }
 
-  /**
-   * @see IndexReader#getFieldNames(IndexReader.FieldOption)
-   */
   public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
     // maintain a unique set of field names
+    ensureOpen();
     Set fieldSet = new HashSet();
     for (int i = 0; i < subReaders.length; i++) {
       IndexReader reader = subReaders[i];
@@ -66,8 +66,11 @@ public class ParallelReader extends IndexReader {
   /** Construct a ParallelReader. */
   public ParallelReader() throws IOException { super(null); }
 
-  /** Add an IndexReader. */
+  /** Add an IndexReader.
+   * @throws IOException if there is a low-level IO error
+   */
   public void add(IndexReader reader) throws IOException {
+    ensureOpen();
     add(reader, false);
   }
 
@@ -79,10 +82,12 @@ public class ParallelReader extends IndexReader {
    * of documents
    * @throws IllegalArgumentException if not all indexes have the same value
    * of {@link IndexReader#maxDoc()}
+   * @throws IOException if there is a low-level IO error
    */
   public void add(IndexReader reader, boolean ignoreStoredFields)
     throws IOException {
 
+    ensureOpen();
     if (readers.size() == 0) {
       this.maxDoc = reader.maxDoc();
       this.numDocs = reader.numDocs();
@@ -110,14 +115,24 @@ public class ParallelReader extends IndexReader {
     readers.add(reader);
   }
 
-  public int numDocs() { return numDocs; }
+  public int numDocs() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return numDocs;
+  }
 
-  public int maxDoc() { return maxDoc; }
+  public int maxDoc() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return maxDoc;
+  }
 
-  public boolean hasDeletions() { return hasDeletions; }
+  public boolean hasDeletions() {
+    // Don't call ensureOpen() here (it could affect performance)
+    return hasDeletions;
+  }
 
   // check first reader
   public boolean isDeleted(int n) {
+    // Don't call ensureOpen() here (it could affect performance)
     if (readers.size() > 0)
       return ((IndexReader)readers.get(0)).isDeleted(n);
     return false;
@@ -141,6 +156,7 @@ public class ParallelReader extends IndexReader {
 
   // append fields from storedFieldReaders
   public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    ensureOpen();
     Document result = new Document();
     for (int i = 0; i < storedFieldReaders.size(); i++) {
       IndexReader reader = (IndexReader)storedFieldReaders.get(i);
@@ -166,6 +182,7 @@ public class ParallelReader extends IndexReader {
 
   // get all vectors
   public TermFreqVector[] getTermFreqVectors(int n) throws IOException {
+    ensureOpen();
     ArrayList results = new ArrayList();
     Iterator i = fieldToReader.entrySet().iterator();
     while (i.hasNext()) {
@@ -182,22 +199,26 @@ public class ParallelReader extends IndexReader {
 
   public TermFreqVector getTermFreqVector(int n, String field)
     throws IOException {
+    ensureOpen();
     IndexReader reader = ((IndexReader)fieldToReader.get(field));
     return reader==null ? null : reader.getTermFreqVector(n, field);
   }
 
   public boolean hasNorms(String field) throws IOException {
+    ensureOpen();
     IndexReader reader = ((IndexReader)fieldToReader.get(field));
     return reader==null ? false : reader.hasNorms(field);
   }
 
   public byte[] norms(String field) throws IOException {
+    ensureOpen();
     IndexReader reader = ((IndexReader)fieldToReader.get(field));
     return reader==null ? null : reader.norms(field);
   }
 
   public void norms(String field, byte[] result, int offset)
     throws IOException {
+    ensureOpen();
     IndexReader reader = ((IndexReader)fieldToReader.get(field));
     if (reader!=null)
       reader.norms(field, result, offset);
@@ -211,31 +232,38 @@ public class ParallelReader extends IndexReader {
   }
 
   public TermEnum terms() throws IOException {
+    ensureOpen();
     return new ParallelTermEnum();
   }
 
   public TermEnum terms(Term term) throws IOException {
+    ensureOpen();
     return new ParallelTermEnum(term);
   }
 
   public int docFreq(Term term) throws IOException {
+    ensureOpen();
     IndexReader reader = ((IndexReader)fieldToReader.get(term.field()));
     return reader==null ? 0 : reader.docFreq(term);
   }
 
   public TermDocs termDocs(Term term) throws IOException {
+    ensureOpen();
     return new ParallelTermDocs(term);
   }
 
   public TermDocs termDocs() throws IOException {
+    ensureOpen();
     return new ParallelTermDocs();
   }
 
   public TermPositions termPositions(Term term) throws IOException {
+    ensureOpen();
     return new ParallelTermPositions(term);
   }
 
   public TermPositions termPositions() throws IOException {
+    ensureOpen();
     return new ParallelTermPositions();
   }
 
@@ -251,6 +279,7 @@ public class ParallelReader extends IndexReader {
 
 
   public Collection getFieldNames (IndexReader.FieldOption fieldNames) {
+    ensureOpen();
     Set fieldSet = new HashSet();
     for (int i = 0; i < readers.size(); i++) {
       IndexReader reader = ((IndexReader)readers.get(i));
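Note the split the patch makes in ParallelReader (and again in SegmentReader below): navigation methods such as terms(), termDocs() and document() gain an ensureOpen() guard, while per-document accessors like numDocs(), maxDoc(), hasDeletions() and isDeleted() only get a comment explaining that the check is skipped for performance. The sketch below is illustrative only and not taken from the patch; it shows the kind of loop that presumably motivates the split, where maxDoc() and isDeleted() are called once per document, so even a cheap closed-flag check would be paid maxDoc() times per pass.

    import org.apache.lucene.index.IndexReader;

    public class LiveDocCounter {
      /** Counts the non-deleted documents by probing every doc id once. */
      public static int countLiveDocs(IndexReader reader) {
        int live = 0;
        for (int docID = 0; docID < reader.maxDoc(); docID++) {  // hot loop
          if (!reader.isDeleted(docID)) {
            live++;
          }
        }
        return live;   // should match reader.numDocs() for a consistent reader
      }
    }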
@@ -266,10 +266,12 @@ class SegmentReader extends IndexReader {
   }
 
   static boolean hasDeletions(SegmentInfo si) throws IOException {
+    // Don't call ensureOpen() here (it could affect performance)
     return si.hasDeletions();
   }
 
   public boolean hasDeletions() {
+    // Don't call ensureOpen() here (it could affect performance)
     return deletedDocs != null;
   }
 
@@ -300,10 +302,12 @@ class SegmentReader extends IndexReader {
   }
 
   public TermEnum terms() {
+    ensureOpen();
     return tis.terms();
   }
 
   public TermEnum terms(Term t) throws IOException {
+    ensureOpen();
     return tis.terms(t);
   }
 
@@ -312,6 +316,7 @@ class SegmentReader extends IndexReader {
    * @throws IOException if there is a low-level IO error
    */
   public synchronized Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
+    ensureOpen();
     if (isDeleted(n))
       throw new IllegalArgumentException
               ("attempt to access a deleted document");
@@ -323,14 +328,17 @@ class SegmentReader extends IndexReader {
   }
 
   public TermDocs termDocs() throws IOException {
+    ensureOpen();
     return new SegmentTermDocs(this);
   }
 
   public TermPositions termPositions() throws IOException {
+    ensureOpen();
     return new SegmentTermPositions(this);
   }
 
   public int docFreq(Term t) throws IOException {
+    ensureOpen();
     TermInfo ti = tis.get(t);
     if (ti != null)
       return ti.docFreq;
@@ -339,6 +347,7 @@ class SegmentReader extends IndexReader {
   }
 
   public int numDocs() {
+    // Don't call ensureOpen() here (it could affect performance)
     int n = maxDoc();
     if (deletedDocs != null)
       n -= deletedDocs.count();
@@ -346,6 +355,7 @@ class SegmentReader extends IndexReader {
   }
 
   public int maxDoc() {
+    // Don't call ensureOpen() here (it could affect performance)
     return si.docCount;
   }
 
@@ -353,6 +363,7 @@ class SegmentReader extends IndexReader {
    * @see IndexReader#getFieldNames(IndexReader.FieldOption fldOption)
    */
   public Collection getFieldNames(IndexReader.FieldOption fieldOption) {
+    ensureOpen();
 
     Set fieldSet = new HashSet();
     for (int i = 0; i < fieldInfos.size(); i++) {
@@ -394,6 +405,7 @@ class SegmentReader extends IndexReader {
 
 
   public synchronized boolean hasNorms(String field) {
+    ensureOpen();
     return norms.containsKey(field);
   }
 
@@ -426,6 +438,7 @@ class SegmentReader extends IndexReader {
 
   // returns fake norms if norms aren't available
   public synchronized byte[] norms(String field) throws IOException {
+    ensureOpen();
     byte[] bytes = getNorms(field);
     if (bytes==null) bytes=fakeNorms();
     return bytes;
@@ -447,6 +460,7 @@ class SegmentReader extends IndexReader {
   public synchronized void norms(String field, byte[] bytes, int offset)
     throws IOException {
 
+    ensureOpen();
     Norm norm = (Norm) norms.get(field);
     if (norm == null) {
       System.arraycopy(fakeNorms(), 0, bytes, offset, maxDoc());
@@ -537,6 +551,7 @@ class SegmentReader extends IndexReader {
    */
   public TermFreqVector getTermFreqVector(int docNumber, String field) throws IOException {
     // Check if this field is invalid or has no stored term vector
+    ensureOpen();
     FieldInfo fi = fieldInfos.fieldInfo(field);
     if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
       return null;
@@ -557,6 +572,7 @@ class SegmentReader extends IndexReader {
    * @throws IOException
    */
   public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
+    ensureOpen();
     if (termVectorsReaderOrig == null)
       return null;
 
@@ -0,0 +1,28 @@
+package org.apache.lucene.store;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * This exception is thrown when there is an attempt to
+ * access something that has already been closed.
+ */
+public class AlreadyClosedException extends IllegalStateException {
+  public AlreadyClosedException(String message) {
+    super(message);
+  }
+}
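Because AlreadyClosedException extends IllegalStateException it is unchecked: the guarded methods can start throwing it without widening their declared throws clauses, and existing callers compile unchanged. Callers that want to treat "closed" as a recoverable condition can still catch it explicitly. A small sketch of that caller-side choice follows; the helper class and its name are invented for illustration and are not part of this commit, and only RAMDirectory is guarded among Directory implementations in this patch.

    import java.io.IOException;
    import org.apache.lucene.store.AlreadyClosedException;
    import org.apache.lucene.store.Directory;

    public class DirectoryProbe {
      /** Returns false if the directory has already been closed, true otherwise. */
      public static boolean isStillOpen(Directory dir) {
        try {
          dir.list();                     // a guarded method (RAMDirectory.list() in this patch)
          return true;
        } catch (AlreadyClosedException e) {
          return false;                   // unchecked, but still catchable
        } catch (IOException e) {
          return true;                    // open, but listing failed for another reason
        }
      }
    }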
@@ -97,6 +97,7 @@ public class RAMDirectory extends Directory implements Serializable {
 
   /** Returns an array of strings, one for each file in the directory. */
   public synchronized final String[] list() {
+    ensureOpen();
     Set fileNames = fileMap.keySet();
     String[] result = new String[fileNames.size()];
     int i = 0;
@@ -108,6 +109,7 @@ public class RAMDirectory extends Directory implements Serializable {
 
   /** Returns true iff the named file exists in this directory. */
   public final boolean fileExists(String name) {
+    ensureOpen();
     RAMFile file;
     synchronized (this) {
       file = (RAMFile)fileMap.get(name);
@@ -119,6 +121,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * @throws IOException if the file does not exist
    */
   public final long fileModified(String name) throws IOException {
+    ensureOpen();
     RAMFile file;
     synchronized (this) {
       file = (RAMFile)fileMap.get(name);
@@ -132,6 +135,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * @throws IOException if the file does not exist
    */
   public void touchFile(String name) throws IOException {
+    ensureOpen();
     RAMFile file;
     synchronized (this) {
       file = (RAMFile)fileMap.get(name);
@@ -154,6 +158,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * @throws IOException if the file does not exist
    */
   public final long fileLength(String name) throws IOException {
+    ensureOpen();
     RAMFile file;
     synchronized (this) {
       file = (RAMFile)fileMap.get(name);
@@ -167,6 +172,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * directory.  This is currently quantized to
    * BufferedIndexOutput.BUFFER_SIZE. */
   public synchronized final long sizeInBytes() {
+    ensureOpen();
     return sizeInBytes;
   }
 
@@ -174,6 +180,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * @throws IOException if the file does not exist
    */
   public synchronized void deleteFile(String name) throws IOException {
+    ensureOpen();
     RAMFile file = (RAMFile)fileMap.get(name);
     if (file!=null) {
       fileMap.remove(name);
@@ -188,6 +195,7 @@ public class RAMDirectory extends Directory implements Serializable {
    * @deprecated
    */
   public synchronized final void renameFile(String from, String to) throws IOException {
+    ensureOpen();
     RAMFile fromFile = (RAMFile)fileMap.get(from);
     if (fromFile==null)
       throw new FileNotFoundException(from);
@@ -202,6 +210,7 @@ public class RAMDirectory extends Directory implements Serializable {
 
   /** Creates a new, empty file in the directory with the given name. Returns a stream writing this file. */
   public IndexOutput createOutput(String name) {
+    ensureOpen();
     RAMFile file = new RAMFile(this);
     synchronized (this) {
       RAMFile existing = (RAMFile)fileMap.get(name);
@@ -216,6 +225,7 @@ public class RAMDirectory extends Directory implements Serializable {
 
   /** Returns a stream reading an existing file. */
   public IndexInput openInput(String name) throws IOException {
+    ensureOpen();
     RAMFile file;
     synchronized (this) {
       file = (RAMFile)fileMap.get(name);
@@ -230,4 +240,12 @@ public class RAMDirectory extends Directory implements Serializable {
     fileMap = null;
   }
 
+  /**
+   * @throws AlreadyClosedException if this IndexReader is closed
+   */
+  protected final void ensureOpen() throws AlreadyClosedException {
+    if (fileMap == null) {
+      throw new AlreadyClosedException("this RAMDirectory is closed");
+    }
+  }
 }
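In RAMDirectory the closed check piggybacks on state that close() already clears: close() sets fileMap to null, and the new ensureOpen() treats a null fileMap as "closed". A short sketch of the resulting behavior follows; it is illustrative only and not taken from the test suite, and the file name "probe" is a placeholder.

    import org.apache.lucene.store.AlreadyClosedException;
    import org.apache.lucene.store.RAMDirectory;

    public class ClosedRAMDirectorySketch {
      public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        dir.createOutput("probe").close();            // create one empty file
        System.out.println(dir.fileExists("probe"));  // true while open

        dir.close();                                  // close() nulls fileMap
        try {
          dir.list();                                 // ensureOpen() rejects this now
        } catch (AlreadyClosedException e) {
          System.out.println("caught: " + e.getMessage());
        }
      }
    }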
@@ -23,6 +23,7 @@ import org.apache.lucene.document.*;
 import org.apache.lucene.search.Similarity;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util._TestUtil;
 
 import java.io.File;
@@ -133,6 +134,36 @@ public class TestFieldsReader extends TestCase {
     }
   }
 
+  public void testLazyFieldsAfterClose() throws Exception {
+    assertTrue(dir != null);
+    assertTrue(fieldInfos != null);
+    FieldsReader reader = new FieldsReader(dir, "test", fieldInfos);
+    assertTrue(reader != null);
+    assertTrue(reader.size() == 1);
+    Set loadFieldNames = new HashSet();
+    loadFieldNames.add(DocHelper.TEXT_FIELD_1_KEY);
+    loadFieldNames.add(DocHelper.TEXT_FIELD_UTF1_KEY);
+    Set lazyFieldNames = new HashSet();
+    lazyFieldNames.add(DocHelper.LARGE_LAZY_FIELD_KEY);
+    lazyFieldNames.add(DocHelper.LAZY_FIELD_KEY);
+    lazyFieldNames.add(DocHelper.LAZY_FIELD_BINARY_KEY);
+    lazyFieldNames.add(DocHelper.TEXT_FIELD_UTF2_KEY);
+    lazyFieldNames.add(DocHelper.COMPRESSED_TEXT_FIELD_2_KEY);
+    SetBasedFieldSelector fieldSelector = new SetBasedFieldSelector(loadFieldNames, lazyFieldNames);
+    Document doc = reader.doc(0, fieldSelector);
+    assertTrue("doc is null and it shouldn't be", doc != null);
+    Fieldable field = doc.getFieldable(DocHelper.LAZY_FIELD_KEY);
+    assertTrue("field is null and it shouldn't be", field != null);
+    assertTrue("field is not lazy and it should be", field.isLazy());
+    reader.close();
+    try {
+      String value = field.stringValue();
+      fail("did not hit AlreadyClosedException as expected");
+    } catch (AlreadyClosedException e) {
+      // expected
+    }
+  }
+
   public void testLoadFirst() throws Exception {
     assertTrue(dir != null);
     assertTrue(fieldInfos != null);
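The new testLazyFieldsAfterClose pins down a subtle consequence of lazy field loading: a lazily selected Fieldable defers reading its value until stringValue() or binaryValue() is first called, so a field object obtained before close() still trips ensureOpen() when its value is finally read afterwards. Application code sees the same effect through IndexReader.document(int, FieldSelector); the sketch below is hedged illustration only, and the field names "title" and "body" are placeholders rather than anything from this commit.

    import java.util.Collections;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Fieldable;
    import org.apache.lucene.document.SetBasedFieldSelector;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.store.AlreadyClosedException;
    import org.apache.lucene.store.Directory;

    public class LazyFieldAfterCloseSketch {
      /** Loads doc 0 with "body" marked lazy, closes the reader, then touches the lazy value. */
      public static void demo(Directory dir) throws Exception {
        IndexReader reader = IndexReader.open(dir);
        SetBasedFieldSelector selector = new SetBasedFieldSelector(
            Collections.singleton("title"),    // loaded eagerly
            Collections.singleton("body"));    // loaded lazily
        Document doc = reader.document(0, selector);
        Fieldable lazyBody = doc.getFieldable("body");   // value not read yet
        reader.close();
        try {
          lazyBody.stringValue();              // first (lazy) read happens here
        } catch (AlreadyClosedException e) {
          // expected: the stream backing the lazy field was closed
        }
      }
    }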
@@ -26,6 +26,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.analysis.WhitespaceAnalyzer;
 import org.apache.lucene.document.Document;
@@ -279,21 +280,21 @@ public class TestIndexReader extends TestCase
     try {
       reader.deleteDocument(4);
       fail("deleteDocument after close failed to throw IOException");
-    } catch (IOException e) {
+    } catch (AlreadyClosedException e) {
       // expected
     }
 
     try {
       reader.setNorm(5, "aaa", 2.0f);
       fail("setNorm after close failed to throw IOException");
-    } catch (IOException e) {
+    } catch (AlreadyClosedException e) {
       // expected
     }
 
     try {
       reader.undeleteAll();
       fail("undeleteAll after close failed to throw IOException");
-    } catch (IOException e) {
+    } catch (AlreadyClosedException e) {
       // expected
     }
   }
@@ -19,6 +19,7 @@ import org.apache.lucene.store.FSDirectory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.AlreadyClosedException;
 
 import org.apache.lucene.store.MockRAMDirectory;
 import org.apache.lucene.store.LockFactory;
@@ -724,6 +725,25 @@ public class TestIndexWriter extends TestCase
     }
   }
 
+  public void testChangesAfterClose() throws IOException {
+    Directory dir = new RAMDirectory();
+
+    IndexWriter writer = null;
+
+    writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
+    addDoc(writer);
+
+    // close
+    writer.close();
+    try {
+      addDoc(writer);
+      fail("did not hit AlreadyClosedException");
+    } catch (AlreadyClosedException e) {
+      // expected
+    }
+  }
+
+
   // Simulate a corrupt index by removing one of the cfs
   // files and make sure we get an IOException trying to
   // open the index: