mirror of https://github.com/apache/lucene.git
LUCENE-1210: fix deadlock case on hitting exception in mergeInit
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@635190 13f79535-47bb-0310-9956-ffa450edef68
parent 10f2695b65
commit fa4d10baf9
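The deadlock being fixed: a merge is registered in runningMerges before its initialization completes, so if mergeInit throws, the merge is never finished or deregistered, and threads waiting for running merges to drain (for example during close()) block forever. The change below wraps initialization in try/finally so a failed init gets the same cleanup a finished merge would get. The following is a minimal, self-contained sketch of that cleanup-on-failure pattern, using hypothetical names (TaskRegistry, running, awaitIdle) rather than Lucene's actual classes:

import java.util.HashSet;
import java.util.Set;

// Illustrative sketch only: a registry of in-flight tasks where a failed
// initialization must still deregister the task, otherwise a thread waiting
// for the registry to drain blocks forever.
class TaskRegistry {
  private final Set<Object> running = new HashSet<Object>();

  // Register first, then initialize; the try/finally guarantees that a
  // throwing init does not leave the task stuck in the running set.
  synchronized void start(Object task) {
    running.add(task);
    boolean success = false;
    try {
      doInit(task);          // may throw
      success = true;
    } finally {
      if (!success) {
        running.remove(task);
        notifyAll();         // wake threads blocked in awaitIdle()
      }
    }
  }

  synchronized void finish(Object task) {
    running.remove(task);
    notifyAll();
  }

  // Blocks until no tasks are running; without the cleanup above, a failed
  // init would leave this waiting forever.
  synchronized void awaitIdle() throws InterruptedException {
    while (!running.isEmpty())
      wait();
  }

  // Subclasses or tests may make this throw to simulate a failing init.
  protected void doInit(Object task) {
  }
}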
@@ -3590,6 +3590,19 @@ public class IndexWriter {
   /** Does initial setup for a merge, which is fast but holds
    *  the synchronized lock on IndexWriter instance. */
   final synchronized void mergeInit(MergePolicy.OneMerge merge) throws IOException {
+    boolean success = false;
+    try {
+      _mergeInit(merge);
+      success = true;
+    } finally {
+      if (!success) {
+        mergeFinish(merge);
+        runningMerges.remove(merge);
+      }
+    }
+  }
+
+  final synchronized private void _mergeInit(MergePolicy.OneMerge merge) throws IOException {
 
     assert testPoint("startMergeInit");
 
@@ -3181,7 +3181,7 @@ public class TestIndexWriter extends LuceneTestCase
                          Field.Index.TOKENIZED));
     try {
       w.addDocument(crashDoc, analyzer);
-      fail("did not hit exxpected exception");
+      fail("did not hit expected exception");
     } catch (IOException ioe) {
       // expected
     }
@@ -3189,4 +3189,46 @@ public class TestIndexWriter extends LuceneTestCase
     w.close();
     dir.close();
   }
+
+  public class MockIndexWriter2 extends IndexWriter {
+
+    public MockIndexWriter2(Directory dir, boolean autoCommit, Analyzer a, boolean create, MaxFieldLength mfl) throws IOException {
+      super(dir, autoCommit, a, create, mfl);
+    }
+
+    boolean doFail;
+    boolean failed;
+
+    boolean testPoint(String name) {
+      if (doFail && name.equals("startMergeInit")) {
+        failed = true;
+        throw new RuntimeException("intentionally failing");
+      }
+      return true;
+    }
+  }
+
+  // LUCENE-1210
+  public void testExceptionOnMergeInit() throws IOException {
+    MockRAMDirectory dir = new MockRAMDirectory();
+    MockIndexWriter2 w = new MockIndexWriter2(dir, false, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
+    w.setMaxBufferedDocs(2);
+    w.setMergeFactor(2);
+    w.doFail = true;
+    w.setMergeScheduler(new ConcurrentMergeScheduler());
+    Document doc = new Document();
+    doc.add(new Field("field", "a field", Field.Store.YES,
+                      Field.Index.TOKENIZED));
+    for(int i=0;i<10;i++)
+      try {
+        w.addDocument(doc);
+      } catch (RuntimeException re) {
+        break;
+      }
+
+    ((ConcurrentMergeScheduler) w.getMergeScheduler()).sync();
+    assertTrue(w.failed);
+    w.close();
+    dir.close();
+  }
 }
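The test above injects the failure through IndexWriter's testPoint hook: MockIndexWriter2 overrides it to throw at "startMergeInit", then the test checks that the concurrent merge scheduler can still sync and the writer can still close instead of hanging. The same check can be written against the hypothetical TaskRegistry sketch earlier in this page; the names below (FailingRegistry, TaskRegistryTest) are illustrative, not part of Lucene:

import junit.framework.TestCase;

// Hypothetical companion to the TaskRegistry sketch above, mirroring how the
// test overrides testPoint(...) to force a failure at a chosen point.
public class TaskRegistryTest extends TestCase {

  static class FailingRegistry extends TaskRegistry {
    boolean failed;

    protected void doInit(Object task) {
      failed = true;
      throw new RuntimeException("intentionally failing");
    }
  }

  public void testExceptionOnInit() throws InterruptedException {
    FailingRegistry registry = new FailingRegistry();
    try {
      registry.start(new Object());
      fail("did not hit expected exception");
    } catch (RuntimeException re) {
      // expected
    }
    assertTrue(registry.failed);
    registry.awaitIdle();   // would hang without the cleanup in start()
  }
}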