LUCENE-823: add unit test coverage to assert that when a MockRAMDirectory is closed, there are no open files; fixed 2 minor cases this check uncovered

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@515316 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2007-03-06 21:40:55 +00:00
parent 2380a957ef
commit 1d1e44681e
11 changed files with 129 additions and 59 deletions

CHANGES.txt

@@ -38,6 +38,11 @@ Bug fixes
failed to reduce the number of open descriptors since it was still
opened once per field with norms. (yonik)
5. LUCENE-823: Make sure internal file handles are closed when
hitting an exception (eg disk full) while flushing deletes in
IndexWriter's mergeSegments, and also during
IndexWriter.addIndexes. (Mike McCandless)
New features
1. LUCENE-759: Added two n-gram-producing TokenFilters.
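
Taken together, the test-side changes below all follow one pattern: allocate a MockRAMDirectory wherever a plain RAMDirectory was used, close every reader and writer, and close the directory last so its open-file check can fire. A minimal self-contained sketch of that pattern (MockRAMDirectory lives under the test tree, and the sketch assumes its noDeleteOpenFile flag is enabled; the close() override added at the bottom of this commit only shows the flag being tested, not set):

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;

public class OpenFileCheckSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new MockRAMDirectory();   // instead of new RAMDirectory()
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
    writer.addDocument(new Document());
    writer.close();

    IndexReader reader = IndexReader.open(dir);
    reader.close();   // leave this out and dir.close() throws a RuntimeException
    dir.close();      // asserts no files are still open
  }
}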

src/java/org/apache/lucene/index/IndexWriter.java

@@ -1274,7 +1274,11 @@ public class IndexWriter {
SegmentMerger merger = new SegmentMerger(this, mergedName);
final Vector segmentsToDelete = new Vector();
SegmentInfo info;
String segmentsInfosFileName = segmentInfos.getCurrentSegmentFileName();
IndexReader sReader = null;
try {
if (segmentInfos.size() == 1){ // add existing index, if any
sReader = SegmentReader.get(segmentInfos.info(0));
merger.add(sReader);
@@ -1284,10 +1288,6 @@
for (int i = 0; i < readers.length; i++) // add new indexes
merger.add(readers[i]);
SegmentInfo info;
String segmentsInfosFileName = segmentInfos.getCurrentSegmentFileName();
boolean success = false;
startTransaction();
@@ -1300,8 +1300,10 @@
segmentInfos.addElement(info);
commitPending = true;
if(sReader != null)
if(sReader != null) {
sReader.close();
sReader = null;
}
success = true;
@@ -1312,12 +1314,17 @@
commitTransaction();
}
}
} finally {
if (sReader != null) {
sReader.close();
}
}
deleter.deleteFile(segmentsInfosFileName); // delete old segments_N file
deleter.deleteSegments(segmentsToDelete); // delete now-unused segments
if (useCompoundFile) {
success = false;
boolean success = false;
segmentsInfosFileName = segmentInfos.getCurrentSegmentFileName();
Vector filesToDelete;
@@ -1699,8 +1706,13 @@
// the documents buffered before it, not those buffered after it.
applyDeletesSelectively(bufferedDeleteTerms, reader);
} finally {
if (reader != null)
reader.close();
if (reader != null) {
try {
reader.doCommit();
} finally {
reader.doClose();
}
}
}
}
@@ -1719,8 +1731,13 @@
// except the one just flushed from ram.
applyDeletes(bufferedDeleteTerms, reader);
} finally {
if (reader != null)
reader.close();
if (reader != null) {
try {
reader.doCommit();
} finally {
reader.doClose();
}
}
}
}
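
IndexReader.close() both commits pending changes (writing the new .del files) and closes the underlying files; when the commit half hit an exception such as disk full, the file handles leaked. The two hunks above therefore split close() into its halves so that doClose() always runs. Restated as a bare pattern, a sketch reusing the identifiers from the diff (doCommit/doClose are protected but reachable here since IndexWriter lives in the same org.apache.lucene.index package):

IndexReader reader = null;
try {
  reader = SegmentReader.get(info);
  applyDeletes(bufferedDeleteTerms, reader);
} finally {
  if (reader != null) {
    try {
      reader.doCommit();   // may throw, e.g. on disk full
    } finally {
      reader.doClose();    // runs regardless, so no handles leak
    }
  }
}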

src/java/org/apache/lucene/store/RAMDirectory.java

@@ -226,7 +226,7 @@ public class RAMDirectory extends Directory implements Serializable {
}
/** Closes the store to future operations, releasing associated memory. */
public final void close() {
public void close() {
fileMap = null;
}
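
The only change to RAMDirectory is dropping final from close(), which is what lets MockRAMDirectory (further down) intercept the call. In miniature, with a hypothetical subclass name for illustration:

import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.store.RAMDirectory;

// Hypothetical sketch of why close() cannot stay final:
class CheckedRAMDirectory extends RAMDirectory {
  Map openFiles = new HashMap();   // name -> Integer open count, maintained elsewhere

  public void close() {
    if (openFiles.size() > 0) {
      throw new RuntimeException("cannot close: still open: " + openFiles);
    }
    super.close();   // release the directory's memory as before
  }
}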

src/test/org/apache/lucene/index/TestFilterIndexReader.java

@@ -23,6 +23,7 @@ import junit.framework.TestSuite;
import junit.textui.TestRunner;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -91,7 +92,7 @@ public class TestFilterIndexReader extends TestCase {
* @throws Exception on error
*/
public void testFilterIndexReader() throws Exception {
RAMDirectory directory = new RAMDirectory();
RAMDirectory directory = new MockRAMDirectory();
IndexWriter writer =
new IndexWriter(directory, new WhitespaceAnalyzer(), true);
@@ -123,5 +124,6 @@
}
reader.close();
directory.close();
}
}

src/test/org/apache/lucene/index/TestIndexReader.java

@@ -61,7 +61,7 @@ public class TestIndexReader extends TestCase
public void testIsCurrent() throws Exception
{
RAMDirectory d = new RAMDirectory();
RAMDirectory d = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
addDocumentWithFields(writer);
writer.close();
@@ -79,6 +79,7 @@
writer.close();
assertFalse(reader.isCurrent());
reader.close();
d.close();
}
/**
@@ -87,7 +88,7 @@
*/
public void testGetFieldNames() throws Exception
{
RAMDirectory d = new RAMDirectory();
RAMDirectory d = new MockRAMDirectory();
// set up writer
IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(), true);
addDocumentWithFields(writer);
@@ -99,6 +100,7 @@
assertTrue(fieldNames.contains("text"));
assertTrue(fieldNames.contains("unindexed"));
assertTrue(fieldNames.contains("unstored"));
reader.close();
// add more documents
writer = new IndexWriter(d, new StandardAnalyzer(), false);
// want to get some more segments here
@@ -173,7 +175,8 @@
fieldNames = reader.getFieldNames(IndexReader.FieldOption.TERMVECTOR_WITH_POSITION_OFFSET);
assertEquals(1, fieldNames.size()); // 1 field is indexed with term vector position + offset
assertTrue(fieldNames.contains("tvpositionoffset"));
reader.close();
d.close();
}
@@ -205,7 +208,7 @@
public void testBasicDelete() throws IOException
{
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = null;
IndexReader reader = null;
@@ -224,6 +227,7 @@
reader = IndexReader.open(dir);
assertEquals("first docFreq", 100, reader.docFreq(searchTerm));
assertTermDocsCount("first reader", reader, searchTerm, 100);
reader.close();
// DELETE DOCUMENTS CONTAINING TERM: aaa
int deleted = 0;
@@ -244,6 +248,8 @@
assertEquals("deleted docFreq", 100, reader.docFreq(searchTerm));
assertTermDocsCount("deleted termDocs", reader, searchTerm, 0);
reader.close();
reader2.close();
dir.close();
}
// Make sure attempts to make changes after reader is
@@ -561,7 +567,7 @@
public void testLastModified() throws IOException {
assertFalse(IndexReader.indexExists("there_is_no_such_index"));
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
@@ -580,11 +586,12 @@
reader = IndexReader.open(dir);
assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
reader.close();
dir.close();
}
public void testVersion() throws IOException {
assertFalse(IndexReader.indexExists("there_is_no_such_index"));
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
assertFalse(IndexReader.indexExists(dir));
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
@@ -603,10 +610,11 @@
reader = IndexReader.open(dir);
assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
reader.close();
dir.close();
}
public void testLock() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
writer.close();
@@ -622,10 +630,11 @@
reader.deleteDocument(0);
reader.close();
writer.close();
dir.close();
}
public void testUndeleteAll() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
@@ -638,10 +647,11 @@
reader = IndexReader.open(dir);
assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.close();
dir.close();
}
public void testUndeleteAllAfterClose() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
@@ -654,10 +664,11 @@
reader.undeleteAll();
assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.close();
dir.close();
}
public void testUndeleteAllAfterCloseThenReopen() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDocumentWithFields(writer);
addDocumentWithFields(writer);
@@ -672,6 +683,7 @@
reader = IndexReader.open(dir);
assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
reader.close();
dir.close();
}
public void testDeleteReaderReaderConflictUnoptimized() throws IOException{
@@ -694,7 +706,7 @@
int END_COUNT = 144;
// First build up a starting index:
RAMDirectory startDir = new RAMDirectory();
RAMDirectory startDir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true);
for(int i=0;i<157;i++) {
Document d = new Document();
@@ -875,10 +887,12 @@
// Try again with 10 more bytes of free space:
diskFree += 10;
}
startDir.close();
}
public void testDocsOutOfOrderJIRA140() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
for(int i=0;i<11;i++) {
addDoc(writer, "aaa");
@@ -913,11 +927,12 @@
if (!gotException) {
fail("delete of out-of-bounds doc number failed to hit exception");
}
dir.close();
}
public void testExceptionReleaseWriteLockJIRA768() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
addDoc(writer, "aaa");
writer.close();
@@ -945,6 +960,7 @@
if (IndexReader.isLocked(dir)) {
fail("write lock is still held after close");
}
dir.close();
}
private String arrayToString(String[] l) {

src/test/org/apache/lucene/index/TestIndexWriter.java

@@ -427,6 +427,7 @@ public class TestIndexWriter extends TestCase
assertTrue("optimized used too much temporary space: starting usage was " + startDiskUsage + " bytes; max temp usage was " + maxDiskUsage + " but should have been " + (2*startDiskUsage) + " (= 2X starting usage)",
maxDiskUsage <= 2*startDiskUsage);
dir.close();
}
private String arrayToString(String[] l) {

src/test/org/apache/lucene/index/TestParallelReader.java

@@ -36,6 +36,7 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
public class TestParallelReader extends TestCase {
@@ -103,7 +104,7 @@
Directory dir1 = getDir1();
// one document only:
Directory dir2 = new RAMDirectory();
Directory dir2 = new MockRAMDirectory();
IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(), true);
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.TOKENIZED));
@@ -137,7 +138,7 @@
// Fields 1-4 indexed together:
private Searcher single() throws IOException {
Directory dir = new RAMDirectory();
Directory dir = new MockRAMDirectory();
IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(), true);
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));
@@ -167,7 +168,7 @@
}
private Directory getDir1() throws IOException {
Directory dir1 = new RAMDirectory();
Directory dir1 = new MockRAMDirectory();
IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(), true);
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.TOKENIZED));

src/test/org/apache/lucene/index/TestStressIndexing.java

@@ -150,7 +150,7 @@ public class TestStressIndexing extends TestCase {
public void testStressIndexAndSearching() throws Exception {
// First in a RAM directory:
Directory directory = new RAMDirectory();
Directory directory = new MockRAMDirectory();
runStressTest(directory);
directory.close();

src/test/org/apache/lucene/search/TestMultiSearcher.java

@@ -29,6 +29,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
import java.io.IOException;
import java.util.Collections;
@@ -59,8 +60,8 @@
throws Exception
{
// creating two directories for indices
Directory indexStoreA = new RAMDirectory();
Directory indexStoreB = new RAMDirectory();
Directory indexStoreA = new MockRAMDirectory();
Directory indexStoreB = new MockRAMDirectory();
// creating a document to store
Document lDoc = new Document();
@@ -196,6 +197,8 @@
Document d = hits3.doc(i);
}
mSearcher3.close();
indexStoreA.close();
indexStoreB.close();
}
private static Document createDocument(String contents1, String contents2) {
@@ -288,7 +291,7 @@
IndexSearcher indexSearcher1;
Hits hits;
ramDirectory1=new RAMDirectory();
ramDirectory1=new MockRAMDirectory();
// First put the documents in the same index
initIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...
@@ -316,8 +319,8 @@
RAMDirectory ramDirectory2;
IndexSearcher indexSearcher2;
ramDirectory1=new RAMDirectory();
ramDirectory2=new RAMDirectory();
ramDirectory1=new MockRAMDirectory();
ramDirectory2=new MockRAMDirectory();
// Now put the documents in a different index
initIndex(ramDirectory1, nDocs, true, null); // documents with a single token "doc0", "doc1", etc...

src/test/org/apache/lucene/store/MockRAMDirectory.java

@@ -200,4 +200,17 @@ public class MockRAMDirectory extends RAMDirectory {
size += ((RAMFile) it.next()).length;
return size;
}
public void close() {
if (openFiles == null) {
openFiles = new HashMap();
}
synchronized(openFiles) {
if (noDeleteOpenFile && openFiles.size() > 0) {
// RuntimeException instead of IOException because
// super() does not throw IOException currently:
throw new RuntimeException("MockRAMDirectory: cannot close: there are still open files: " + openFiles);
}
}
}
}
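
One way to see the new check fire, as a sketch (inside a test method that may throw IOException; IndexInput/IndexOutput are org.apache.lucene.store classes, and the sketch assumes noDeleteOpenFile defaults to on, since the diff above only shows the flag being tested):

MockRAMDirectory dir = new MockRAMDirectory();
IndexOutput out = dir.createOutput("test");
out.writeInt(42);
out.close();

IndexInput in = dir.openInput("test");   // registered in dir.openFiles
try {
  dir.close();                           // file still open: RuntimeException
} catch (RuntimeException e) {
  System.out.println("caught expected: " + e.getMessage());
}
in.close();                              // open count drops to zero
dir.close();                             // now succeeds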

src/test/org/apache/lucene/store/MockRAMInputStream.java

@@ -25,6 +25,7 @@ package org.apache.lucene.store;
public class MockRAMInputStream extends RAMInputStream {
private MockRAMDirectory dir;
private String name;
private boolean isClone;
/** Construct an input stream reading from the given RAMFile. */
public MockRAMInputStream(MockRAMDirectory dir, String name, RAMFile f) {
@@ -35,6 +36,10 @@
public void close() {
super.close();
// Pending resolution on LUCENE-686 we may want to
// remove the conditional check so we also track that
// all clones get closed:
if (!isClone) {
synchronized(dir.openFiles) {
Integer v = (Integer) dir.openFiles.get(name);
if (v.intValue() == 1) {
@@ -45,9 +50,15 @@
}
}
}
}
public Object clone() {
MockRAMInputStream clone = (MockRAMInputStream) super.clone();
clone.isClone = true;
// Pending resolution on LUCENE-686 we may want to
// uncomment this code so that we also track that all
// clones get closed:
/*
synchronized(dir.openFiles) {
if (dir.openFiles.containsKey(name)) {
Integer v = (Integer) dir.openFiles.get(name);
@@ -57,6 +68,7 @@
throw new RuntimeException("BUG: cloned file was not open?");
}
}
*/
return clone;
}
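
The diff shows only the decrement half of the bookkeeping; the increment half happens when MockRAMDirectory hands out a MockRAMInputStream. Restated standalone in the same 1.4-era style (raw HashMap, boxed Integer), under the assumption that each open handle registers its name exactly once:

import java.util.HashMap;
import java.util.Map;

class OpenFileTracker {
  private final Map openFiles = new HashMap();   // String name -> Integer count

  synchronized void onOpen(String name) {        // the MockRAMDirectory.openInput side
    Integer v = (Integer) openFiles.get(name);
    openFiles.put(name, new Integer(v == null ? 1 : v.intValue() + 1));
  }

  synchronized void onClose(String name) {       // the MockRAMInputStream.close side
    Integer v = (Integer) openFiles.get(name);
    if (v.intValue() == 1) {
      openFiles.remove(name);                    // last handle gone
    } else {
      openFiles.put(name, new Integer(v.intValue() - 1));
    }
  }

  synchronized boolean anyOpen() {
    return openFiles.size() > 0;
  }
}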
}