LUCENE-1647: fix case where IndexReader.undeleteAll would make the segment's deletion count incorrect

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@777686 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael McCandless
Date:   2009-05-22 19:57:53 +00:00
Parent: caeedf1233
Commit: 28fe2523d9
4 changed files with 32 additions and 12 deletions

CHANGES.txt

@@ -179,6 +179,9 @@ Bug fixes
    sort) by doc Id in a consistent manner (i.e., if Sort.FIELD_DOC was used vs.
    when it wasn't). (Shai Erera via Michael McCandless)

10. LUCENE-1647: Fix case where IndexReader.undeleteAll would cause
    the segment's deletion count to be incorrect. (Mike McCandless)

New features

 1. LUCENE-1411: Added expert API to open an IndexWriter on a prior

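For context, the failure mode the entry above describes can be reproduced with roughly the following sequence against the 2.9-era reader-write API. This is a minimal standalone sketch, not code from this commit: the class name, the single-document setup, and the use of a plain RAMDirectory instead of the test's MockRAMDirectory are illustrative.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;

public class UndeleteAllRepro {
  public static void main(String[] args) throws Exception {
    RAMDirectory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(),
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.add(new Field("id", "a", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));
    writer.addDocument(doc);
    writer.close();

    IndexReader reader = IndexReader.open(dir);
    reader.deleteDocuments(new Term("id", "a")); // mark the document deleted
    reader.flush();                              // commit: SegmentInfo now records delCount=1
    reader.undeleteAll();                        // before this fix, the recorded count was not reset here
    reader.deleteDocuments(new Term("id", "a")); // delete again after undeleteAll
    reader.close();                              // commit: the count bookkeeping could now disagree with the .del bits
    IndexReader.open(dir).close();               // re-open to check the index, as the new test does
  }
}

The new TestIndexReader.testIndexReaderUnDeleteAll added below exercises the same delete/flush/undeleteAll/delete pattern across three documents.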
src/java/org/apache/lucene/index/SegmentReader.java

@@ -57,12 +57,10 @@ class SegmentReader extends DirectoryIndexReader {
  Ref deletedDocsRef = null;
  private boolean deletedDocsDirty = false;
  private boolean normsDirty = false;
  private boolean undeleteAll = false;
  private int pendingDeleteCount;

  private boolean rollbackDeletedDocsDirty = false;
  private boolean rollbackNormsDirty = false;
  private boolean rollbackUndeleteAll = false;
  private int rollbackPendingDeleteCount;

  IndexInput freqStream;
  IndexInput proxStream;
@@ -762,11 +760,10 @@ class SegmentReader extends DirectoryIndexReader {
      si.setDelCount(si.getDelCount()+pendingDeleteCount);
      pendingDeleteCount = 0;
      assert deletedDocs.count() == si.getDelCount(): "delete count mismatch during commit: info=" + si.getDelCount() + " vs BitVector=" + deletedDocs.count();
    } else {
      assert pendingDeleteCount == 0;
    }
    if (undeleteAll && si.hasDeletions()) {
      si.clearDelGen();
      si.setDelCount(0);
    }
    if (normsDirty) { // re-write norms
      si.setNumFields(fieldInfos.size());
      Iterator it = norms.values().iterator();
@@ -779,7 +776,6 @@ class SegmentReader extends DirectoryIndexReader {
    }
    deletedDocsDirty = false;
    normsDirty = false;
    undeleteAll = false;
  }

  FieldsReader getFieldsReader() {
@@ -865,21 +861,23 @@ class SegmentReader extends DirectoryIndexReader {
      oldRef.decRef();
    }
    deletedDocsDirty = true;
    undeleteAll = false;
    if (!deletedDocs.getAndSet(docNum))
      pendingDeleteCount++;
  }

  protected void doUndeleteAll() {
    deletedDocsDirty = false;
    undeleteAll = true;
    if (deletedDocs != null) {
      assert deletedDocsRef != null;
      deletedDocsRef.decRef();
      deletedDocs = null;
      deletedDocsRef = null;
      pendingDeleteCount = 0;
      si.clearDelGen();
      si.setDelCount(0);
    } else {
      assert deletedDocsRef == null;
      assert pendingDeleteCount == 0;
    }
  }
@@ -1254,7 +1252,6 @@ class SegmentReader extends DirectoryIndexReader {
    super.startCommit();
    rollbackDeletedDocsDirty = deletedDocsDirty;
    rollbackNormsDirty = normsDirty;
    rollbackUndeleteAll = undeleteAll;
    rollbackPendingDeleteCount = pendingDeleteCount;
    Iterator it = norms.values().iterator();
    while (it.hasNext()) {
@@ -1267,7 +1264,6 @@ class SegmentReader extends DirectoryIndexReader {
    super.rollbackCommit();
    deletedDocsDirty = rollbackDeletedDocsDirty;
    normsDirty = rollbackNormsDirty;
    undeleteAll = rollbackUndeleteAll;
    pendingDeleteCount = rollbackPendingDeleteCount;
    Iterator it = norms.values().iterator();
    while (it.hasNext()) {

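The SegmentReader changes above amount to one invariant: the delete count recorded in SegmentInfo must always equal the number of set bits in the deleted-docs BitVector, so undeleteAll has to reset the persisted count and the deletes generation immediately instead of deferring that behind an undeleteAll flag until commit. The following is a toy model of that bookkeeping in plain Java with java.util.BitSet, not Lucene code; the class and method names are illustrative.

import java.util.BitSet;

class SegmentDeletesModel {
  private BitSet deletedDocs = new BitSet(); // stands in for the BitVector written to the .del file
  private int delCount;                      // stands in for the count persisted in SegmentInfo
  private int pendingDeleteCount;            // deletes not yet committed

  void delete(int docNum) {
    if (!deletedDocs.get(docNum)) {          // getAndSet in the real BitVector
      deletedDocs.set(docNum);
      pendingDeleteCount++;
    }
  }

  void undeleteAll() {
    // The fix: clear all deletion state here, including the persisted count.
    deletedDocs.clear();
    pendingDeleteCount = 0;
    delCount = 0;
  }

  void commit() {
    delCount += pendingDeleteCount;
    pendingDeleteCount = 0;
    assert deletedDocs.cardinality() == delCount :
      "delete count mismatch: info=" + delCount + " vs bits=" + deletedDocs.cardinality();
  }
}

With the removed flag-based approach, a delete issued after undeleteAll reset the flag, so commit never cleared the stale per-segment count and instead added the new pending deletes on top of it.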
src/test/org/apache/lucene/index/TestIndexReader.java

@@ -1590,6 +1590,27 @@ public class TestIndexReader extends LuceneTestCase
    IndexReader.open(dir).close();
  }

  // LUCENE-1647
  public void testIndexReaderUnDeleteAll() throws Exception {
    MockRAMDirectory dir = new MockRAMDirectory();
    dir.setPreventDoubleWrite(false);
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(),
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("b"));
    writer.addDocument(createDocument("c"));
    writer.close();
    IndexReader reader = IndexReader.open(dir);
    reader.deleteDocuments(new Term("id", "a"));
    reader.flush();
    reader.deleteDocuments(new Term("id", "b"));
    reader.undeleteAll();
    reader.deleteDocuments(new Term("id", "b"));
    reader.close();
    IndexReader.open(dir).close();
    dir.close();
  }

  private Document createDocument(String id) {
    Document doc = new Document();
    doc.add(new Field("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS));

src/test/org/apache/lucene/store/MockRAMDirectory.java

@@ -215,7 +215,7 @@ public class MockRAMDirectory extends RAMDirectory {
    createdFiles.add(name);
    RAMFile existing = (RAMFile)fileMap.get(name);
    // Enforce write once:
    if (existing!=null && !name.equals("segments.gen"))
    if (existing!=null && !name.equals("segments.gen") && preventDoubleWrite)
      throw new IOException("file " + name + " already exists");
    else {
      if (existing!=null) {
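MockRAMDirectory enforces write-once semantics on file names; the change above gates that check on the preventDoubleWrite flag so a test can opt out, as the new undeleteAll test does, presumably because clearing the deletes generation lets the same _N.del file name be written again within one run. Below is a minimal sketch of the flag; the standalone class name and the file name passed to createOutput are illustrative, and MockRAMDirectory is a test-only class in the test source tree.

import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockRAMDirectory;

public class DoubleWriteSketch {
  public static void main(String[] args) throws Exception {
    MockRAMDirectory dir = new MockRAMDirectory();
    dir.setPreventDoubleWrite(false);          // opt out of the write-once check
    IndexOutput out = dir.createOutput("_0_1.del");
    out.close();
    out = dir.createOutput("_0_1.del");        // with the default setting this would throw IOException
    out.close();
    dir.close();
  }
}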