LUCENE-4653: make test more evil; fix leak on exception in IW.getReader

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1428432 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2013-01-03 16:11:31 +00:00
parent 325c3678ea
commit 85a3d34674
2 changed files with 66 additions and 40 deletions


@@ -336,7 +336,7 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     // obtained during this flush are pooled, the first time
     // this method is called:
     poolReaders = true;
-    final DirectoryReader r;
+    DirectoryReader r = null;
     doBeforeFlush();
     boolean anySegmentFlushed = false;
     /*
@@ -346,46 +346,54 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
      * We release the two stage full flush after we are done opening the
      * directory reader!
      */
-    synchronized (fullFlushLock) {
-      boolean success = false;
-      try {
-        anySegmentFlushed = docWriter.flushAllThreads();
-        if (!anySegmentFlushed) {
-          // prevent double increment since docWriter#doFlush increments the flushcount
-          // if we flushed anything.
-          flushCount.incrementAndGet();
-        }
-        success = true;
-        // Prevent segmentInfos from changing while opening the
-        // reader; in theory we could do similar retry logic,
-        // just like we do when loading segments_N
-        synchronized(this) {
-          maybeApplyDeletes(applyAllDeletes);
-          r = StandardDirectoryReader.open(this, segmentInfos, applyAllDeletes);
-          if (infoStream.isEnabled("IW")) {
-            infoStream.message("IW", "return reader version=" + r.getVersion() + " reader=" + r);
-          }
-        }
-      } catch (OutOfMemoryError oom) {
-        handleOOM(oom, "getReader");
-        // never reached but javac disagrees:
-        return null;
-      } finally {
-        if (!success) {
-          if (infoStream.isEnabled("IW")) {
-            infoStream.message("IW", "hit exception during NRT reader");
-          }
-        }
-        // Done: finish the full flush!
-        docWriter.finishFullFlush(success);
-        doAfterFlush();
-      }
-    }
-    if (anySegmentFlushed) {
-      maybeMerge(MergeTrigger.FULL_FLUSH, UNBOUNDED_MAX_MERGE_SEGMENTS);
-    }
-    if (infoStream.isEnabled("IW")) {
-      infoStream.message("IW", "getReader took " + (System.currentTimeMillis() - tStart) + " msec");
-    }
+    boolean success2 = false;
+    try {
+      synchronized (fullFlushLock) {
+        boolean success = false;
+        try {
+          anySegmentFlushed = docWriter.flushAllThreads();
+          if (!anySegmentFlushed) {
+            // prevent double increment since docWriter#doFlush increments the flushcount
+            // if we flushed anything.
+            flushCount.incrementAndGet();
+          }
+          success = true;
+          // Prevent segmentInfos from changing while opening the
+          // reader; in theory we could do similar retry logic,
+          // just like we do when loading segments_N
+          synchronized(this) {
+            maybeApplyDeletes(applyAllDeletes);
+            r = StandardDirectoryReader.open(this, segmentInfos, applyAllDeletes);
+            if (infoStream.isEnabled("IW")) {
+              infoStream.message("IW", "return reader version=" + r.getVersion() + " reader=" + r);
+            }
+          }
+        } catch (OutOfMemoryError oom) {
+          handleOOM(oom, "getReader");
+          // never reached but javac disagrees:
+          return null;
+        } finally {
+          if (!success) {
+            if (infoStream.isEnabled("IW")) {
+              infoStream.message("IW", "hit exception during NRT reader");
+            }
+          }
+          // Done: finish the full flush!
+          docWriter.finishFullFlush(success);
+          doAfterFlush();
+        }
+      }
+      if (anySegmentFlushed) {
+        maybeMerge(MergeTrigger.FULL_FLUSH, UNBOUNDED_MAX_MERGE_SEGMENTS);
+      }
+      if (infoStream.isEnabled("IW")) {
+        infoStream.message("IW", "getReader took " + (System.currentTimeMillis() - tStart) + " msec");
+      }
+      success2 = true;
+    } finally {
+      if (!success2) {
+        IOUtils.closeWhileHandlingException(r);
+      }
+    }
     return r;
   }
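
The leak named in the commit message is the window after StandardDirectoryReader.open succeeds inside the fullFlushLock block: before this change, an exception thrown later in getReader (for example from maybeMerge) escaped without the freshly opened NRT reader ever being closed. The new outer success2 flag and finally block now close it via IOUtils.closeWhileHandlingException. A minimal sketch of the same idiom against the public API; the NrtOpenHelper class, openNrtSafely, and the followUp argument are hypothetical and only stand in for the post-open work:

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.IOUtils;

// Hypothetical helper, not part of Lucene: open an NRT reader and run a
// follow-up step, closing the reader if anything after the open throws.
class NrtOpenHelper {
  static DirectoryReader openNrtSafely(IndexWriter writer, Runnable followUp) throws IOException {
    DirectoryReader r = null;
    boolean success = false;
    try {
      r = DirectoryReader.open(writer, true); // applyAllDeletes=true; same call the test below uses
      followUp.run();                         // stand-in for the post-open work (e.g. merging, logging)
      success = true;
      return r;
    } finally {
      if (!success) {
        // null-safe close that swallows secondary exceptions, like the new finally block above
        IOUtils.closeWhileHandlingException(r);
      }
    }
  }
}

The test change below applies the same flag-based cleanup around DirectoryReader.open(w, true), so a failure while opening the NRT reader does not leak it either.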


@@ -17,7 +17,6 @@ package org.apache.lucene.index;
  * limitations under the License.
  */
 
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
@@ -1028,14 +1027,33 @@ public class TestIndexWriter extends LuceneTestCase {
       doc.add(newField("field", "some text contents", storedTextType));
       for(int i=0;i<100;i++) {
         idField.setStringValue(Integer.toString(i));
-        if (i%2 == 0) {
+        if (i%30 == 0) {
+          w.deleteAll();
+        } else if (i%2 == 0) {
           w.updateDocument(new Term("id", idField.stringValue()), doc);
         } else {
           w.addDocument(doc);
         }
+        if (i%3 == 0) {
+          IndexReader r = null;
+          boolean success = false;
+          try {
+            r = DirectoryReader.open(w, true);
+            success = true;
+          } finally {
+            if (success) {
+              r.close();
+            } else {
+              IOUtils.closeWhileHandlingException(r);
+            }
+          }
+        }
         if (i%10 == 0) {
           w.commit();
         }
+        if (i%40 == 0) {
+          w.forceMerge(1);
+        }
       }
       w.close();
       w = null;
@@ -1120,7 +1138,7 @@ public class TestIndexWriter extends LuceneTestCase {
     assertTrue(new ThreadInterruptedException(new InterruptedException()).getCause() instanceof InterruptedException);
     // issue 300 interrupts to child thread
-    final int numInterrupts = atLeast(300);
+    final int numInterrupts = atLeast(3000);
     int i = 0;
     while(i < numInterrupts) {
       // TODO: would be nice to also sometimes interrupt the