mirror of https://github.com/apache/lucene.git
LUCENE-1270: fix intermittent case where IW.close() can hang after IW.addIndexesNoOptimize is called
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@651026 13f79535-47bb-0310-9956-ffa450edef68
commit 963ec9e522
parent e35d9f6c62
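For context, a minimal standalone sketch of the pattern the new testHangOnClose test (added at the bottom of this diff) exercises: build an index in one directory, pull its segments into a second writer with addIndexesNoOptimize, then close. Before this change, that final close() could intermittently hang waiting on merges of the copied segments. The class name and document contents below are illustrative only; the IndexWriter constructor and settings mirror the ones used in the test.

// Illustrative sketch only (not part of this commit); it mirrors the
// Lucene 2.x-era API used by the testHangOnClose test in this diff.
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class AddIndexesCloseSketch {
  public static void main(String[] args) throws Exception {
    // 1. Build a small source index with several segments.
    Directory src = new RAMDirectory();
    IndexWriter w1 = new IndexWriter(src, false, new WhitespaceAnalyzer(), true,
                                     IndexWriter.MaxFieldLength.LIMITED);
    w1.setMaxBufferedDocs(5);   // force multiple small segments
    Document doc = new Document();
    doc.add(new Field("content", "aaa bbb ccc", Field.Store.NO, Field.Index.TOKENIZED));
    for (int i = 0; i < 60; i++)
      w1.addDocument(doc);
    w1.close();

    // 2. Copy those segments into a second index. Before LUCENE-1270, the
    //    close() that follows could intermittently hang, because the merge
    //    scheduler was never given a last chance to run the merges triggered
    //    by the copied-over segments.
    Directory dst = new RAMDirectory();
    IndexWriter w2 = new IndexWriter(dst, false, new WhitespaceAnalyzer(), true,
                                     IndexWriter.MaxFieldLength.LIMITED);
    w2.addIndexesNoOptimize(new Directory[] {src});
    w2.close();

    src.close();
    dst.close();
  }
}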
IndexWriter.java
@@ -1658,6 +1658,11 @@ public class IndexWriter {
       // going to wait for merges:
       flush(waitForMerges, true, true);
 
+      if (waitForMerges)
+        // Give merge scheduler last chance to run, in case
+        // any pending merges are waiting:
+        mergeScheduler.merge(this);
+
       mergePolicy.close();
 
       finishMerges(waitForMerges);
@@ -2889,6 +2894,9 @@ public class IndexWriter {
    * then copy them over.  Currently this is only used by
    * addIndexesNoOptimize(). */
   private void copyExternalSegments() throws CorruptIndexException, IOException {
+
+    boolean any = false;
+
     while(true) {
       SegmentInfo info = null;
       MergePolicy.OneMerge merge = null;
@@ -2907,6 +2915,7 @@ public class IndexWriter {
         if (registerMerge(merge)) {
           pendingMerges.remove(merge);
           runningMerges.add(merge);
+          any = true;
           merge(merge);
         } else
           // This means there is a bug in the
@@ -2923,6 +2932,11 @@ public class IndexWriter {
         // No more external segments
         break;
     }
+
+    if (any)
+      // Sometimes, on copying an external segment over,
+      // more merges may become necessary:
+      mergeScheduler.merge(this);
   }
 
   /** Merges the provided indexes into this index.
TestAddIndexesNoOptimize.java
@@ -26,6 +26,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.MockRAMDirectory;
 
 import org.apache.lucene.search.PhraseQuery;
 
@@ -432,8 +433,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   private void addDocs(IndexWriter writer, int numDocs) throws IOException {
     for (int i = 0; i < numDocs; i++) {
       Document doc = new Document();
-      doc
-          .add(new Field("content", "aaa", Field.Store.NO,
+      doc.add(new Field("content", "aaa", Field.Store.NO,
           Field.Index.TOKENIZED));
       writer.addDocument(doc);
     }
@@ -442,8 +442,7 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
   private void addDocs2(IndexWriter writer, int numDocs) throws IOException {
     for (int i = 0; i < numDocs; i++) {
       Document doc = new Document();
-      doc
-          .add(new Field("content", "bbb", Field.Store.NO,
+      doc.add(new Field("content", "bbb", Field.Store.NO,
           Field.Index.TOKENIZED));
       writer.addDocument(doc);
     }
@@ -495,4 +494,47 @@ public class TestAddIndexesNoOptimize extends LuceneTestCase {
     assertEquals(3, writer.getSegmentCount());
     writer.close();
   }
+
+  // LUCENE-1270
+  public void testHangOnClose() throws IOException {
+
+    Directory dir = new MockRAMDirectory();
+    IndexWriter writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    writer.setMergePolicy(new LogByteSizeMergePolicy());
+    writer.setMaxBufferedDocs(5);
+    writer.setUseCompoundFile(false);
+    writer.setMergeFactor(100);
+
+    Document doc = new Document();
+    doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
+                      Field.Index.TOKENIZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
+    for(int i=0;i<60;i++)
+      writer.addDocument(doc);
+    writer.setMaxBufferedDocs(200);
+    Document doc2 = new Document();
+    doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
+                      Field.Index.NO));
+    doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
+                      Field.Index.NO));
+    doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
+                      Field.Index.NO));
+    doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
+                      Field.Index.NO));
+    for(int i=0;i<10;i++)
+      writer.addDocument(doc2);
+    writer.close();
+
+    Directory dir2 = new MockRAMDirectory();
+    writer = new IndexWriter(dir2, false, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+    LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
+    lmp.setMinMergeMB(0.0001);
+    writer.setMergePolicy(lmp);
+    writer.setMergeFactor(4);
+    writer.setUseCompoundFile(false);
+    writer.setMergeScheduler(new SerialMergeScheduler());
+    writer.addIndexesNoOptimize(new Directory[] {dir});
+    writer.close();
+    dir.close();
+    dir2.close();
+  }
 }