mirror of https://github.com/apache/lucene.git
LUCENE-1700: make sure expungeDeletes does its job even when a near real-time reader is in use
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@786551 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
ec65df2156
commit
fc243f12ce
|
@ -625,6 +625,15 @@ public class IndexWriter {
|
|||
sr.incRef();
|
||||
return sr;
|
||||
}
|
||||
|
||||
// Returns a ref
|
||||
public synchronized SegmentReader getIfExists(SegmentInfo info) throws IOException {
|
||||
SegmentReader sr = (SegmentReader) readerMap.get(info);
|
||||
if (sr != null) {
|
||||
sr.incRef();
|
||||
}
|
||||
return sr;
|
||||
}
|
||||
}
|
||||
|
||||
synchronized void acquireWrite() {
|
||||
|
@ -4142,18 +4151,10 @@ public class IndexWriter {
|
|||
|
||||
docWriter.pushDeletes();
|
||||
|
||||
if (flushDocs)
|
||||
if (flushDocs) {
|
||||
segmentInfos.add(newSegment);
|
||||
|
||||
if (flushDeletes) {
|
||||
flushDeletesCount++;
|
||||
applyDeletes();
|
||||
}
|
||||
|
||||
doAfterFlush();
|
||||
|
||||
if (flushDocs)
|
||||
checkpoint();
|
||||
}
|
||||
|
||||
if (flushDocs && mergePolicy.useCompoundFile(segmentInfos, newSegment)) {
|
||||
// Now build compound file
|
||||
|
@ -4173,6 +4174,16 @@ public class IndexWriter {
|
|||
checkpoint();
|
||||
}
|
||||
|
||||
if (flushDeletes) {
|
||||
flushDeletesCount++;
|
||||
applyDeletes();
|
||||
}
|
||||
|
||||
if (flushDocs)
|
||||
checkpoint();
|
||||
|
||||
doAfterFlush();
|
||||
|
||||
return flushDocs;
|
||||
|
||||
} catch (OutOfMemoryError oom) {
|
||||
|
|
|
@ -303,7 +303,16 @@ public abstract class LogMergePolicy extends MergePolicy {
|
|||
int firstSegmentWithDeletions = -1;
|
||||
for(int i=0;i<numSegments;i++) {
|
||||
final SegmentInfo info = segmentInfos.info(i);
|
||||
if (info.hasDeletions()) {
|
||||
boolean deletionsInRAM = false;
|
||||
SegmentReader sr = writer.readerPool.getIfExists(info);
|
||||
try {
|
||||
deletionsInRAM = sr != null && sr.hasDeletions();
|
||||
} finally {
|
||||
if (sr != null) {
|
||||
writer.readerPool.release(sr);
|
||||
}
|
||||
}
|
||||
if (info.hasDeletions() || deletionsInRAM) {
|
||||
if (verbose())
|
||||
message(" segment " + info.name + " has deletions");
|
||||
if (firstSegmentWithDeletions == -1)
|
||||
|
|
|
@ -805,4 +805,29 @@ public class TestIndexWriterReader extends LuceneTestCase {
|
|||
r.close();
|
||||
dir1.close();
|
||||
}
|
||||
|
||||
public void testExpungeDeletes() throws Throwable {
|
||||
Directory dir = new MockRAMDirectory();
|
||||
final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
|
||||
IndexWriter.MaxFieldLength.LIMITED);
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
|
||||
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
|
||||
doc.add(id);
|
||||
id.setValue("0");
|
||||
w.addDocument(doc);
|
||||
id.setValue("1");
|
||||
w.addDocument(doc);
|
||||
w.deleteDocuments(new Term("id", "0"));
|
||||
|
||||
IndexReader r = w.getReader();
|
||||
w.expungeDeletes();
|
||||
w.close();
|
||||
r.close();
|
||||
r = IndexReader.open(dir);
|
||||
assertEquals(1, r.numDocs());
|
||||
assertFalse(r.hasDeletions());
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue