LUCENE-1008: fix corruption case when a document with no term vector fields is added after other documents with term vector fields

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@580578 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2007-09-29 12:52:45 +00:00
parent 7496e8834f
commit 014f23033f
3 changed files with 31 additions and 4 deletions

CHANGES.txt

@@ -105,6 +105,11 @@ Bug fixes
 14. LUCENE-992: Fixed IndexWriter.updateDocument to be atomic again
     (this was broken by LUCENE-843). (Ning Li via Mike McCandless)
 
+15. LUCENE-1008: Fixed corruption case when document with no term
+    vector fields is added after documents with term vector fields.
+    This bug was introduced with LUCENE-843. (Grant Ingersoll via
+    Mike McCandless)
+
 New features
 
  1. LUCENE-906: Elision filter for French.

src/java/org/apache/lucene/index/DocumentsWriter.java

@@ -552,8 +552,8 @@ final class DocumentsWriter {
         // Append term vectors to the real outputs:
         if (tvx != null) {
           tvx.writeLong(tvd.getFilePointer());
+          tvd.writeVInt(numVectorFields);
           if (numVectorFields > 0) {
-            tvd.writeVInt(numVectorFields);
             for(int i=0;i<numVectorFields;i++)
               tvd.writeVInt(vectorFieldNumbers[i]);
             assert 0 == vectorFieldPointers[0];
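
The reason the writeVInt call moves outside the if: every document is expected to contribute a record to the term vectors data stream (tvd) that begins with its field count, even when that count is zero, because the per-document pointer written just above (tvx.writeLong(tvd.getFilePointer())) is the only way a reader locates that record. When the old code skipped the record entirely for a document with no vector fields, that document's pointer ended up referencing the next document's data. The toy sketch below (plain java.io, illustrative only, not the actual Lucene .tvx/.tvd format) shows the count-prefixed record pattern and why the count must be written unconditionally:

  import java.io.*;
  import java.util.*;

  // Toy illustration of count-prefixed per-document records (NOT the real
  // Lucene term vector format).  Each document records an offset into the
  // data stream, then writes its record there.  Writing the field count even
  // when it is zero is what keeps the offsets and records in sync.
  public class CountPrefixedRecords {
    public static void main(String[] args) throws IOException {
      ByteArrayOutputStream dataBytes = new ByteArrayOutputStream();
      DataOutputStream data = new DataOutputStream(dataBytes);
      List<Integer> index = new ArrayList<Integer>();  // per-doc offset, like tvx

      int[][] docs = { {7, 9}, {}, {3} };              // middle doc has no vector fields
      for (int[] fields : docs) {
        index.add(data.size());                        // like tvx.writeLong(tvd.getFilePointer())
        data.writeInt(fields.length);                  // always write the count, even 0 (the fix)
        for (int f : fields)
          data.writeInt(f);                            // like tvd.writeVInt(vectorFieldNumbers[i])
      }

      // Reader side: follow each document's offset and read its count-prefixed record.
      byte[] raw = dataBytes.toByteArray();
      for (int doc = 0; doc < docs.length; doc++) {
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(raw, index.get(doc), raw.length - index.get(doc)));
        System.out.println("doc " + doc + " has " + in.readInt() + " vector fields");
      }
    }
  }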

src/test/org/apache/lucene/index/TestIndexWriter.java

@@ -1464,7 +1464,29 @@ public class TestIndexWriter extends TestCase
     dir.close();
   }
 
+  // LUCENE-1008
+  public void testNoTermVectorAfterTermVector() throws IOException {
+    MockRAMDirectory dir = new MockRAMDirectory();
+    IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(), true);
+    Document document = new Document();
+    document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.TOKENIZED,
+                           Field.TermVector.YES));
+    iw.addDocument(document);
+    document = new Document();
+    document.add(new Field("tvtest", "x y z", Field.Store.NO, Field.Index.TOKENIZED,
+                           Field.TermVector.NO));
+    iw.addDocument(document);
+    // Make first segment
+    iw.flush();
+    document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.TOKENIZED,
+                           Field.TermVector.YES));
+    iw.addDocument(document);
+    // Make 2nd segment
+    iw.flush();
+    iw.optimize();
+    iw.close();
+    dir.close();
+  }
 }
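
As a follow-up to the new test, one could also check what a reader sees in the resulting index. This is not part of the committed test; it is a sketch that assumes the org.apache.lucene.index.IndexReader and TermFreqVector imports and would run after iw.close() but before dir.close():

  // Hypothetical verification, not in the commit: the second document, indexed
  // with Field.TermVector.NO, should simply report no vectors for "tvtest",
  // while the documents indexed with Field.TermVector.YES should report some.
  IndexReader reader = IndexReader.open(dir);
  try {
    for (int i = 0; i < reader.maxDoc(); i++) {
      TermFreqVector tv = reader.getTermFreqVector(i, "tvtest");
      System.out.println("doc " + i + ": "
          + (tv == null ? "no term vectors" : tv.getTerms().length + " terms"));
    }
  } finally {
    reader.close();
  }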