LUCENE-6999: break out PointWriter.finish from .close to avoid leaking file handles on exception

Author: Mike McCandless
Date:   2016-01-28 11:02:24 -05:00
Commit: 791ddc627b (parent af3a19574e)
11 changed files with 81 additions and 32 deletions
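
The leak this change fixes follows a common pattern: when close() is also responsible for writing trailing data (a checksum footer, an index file), an exception thrown during that write means the underlying output is never closed. Moving the one-time "commit" work into a separate finish() call lets callers run it only on the success path and still call close() unconditionally. A minimal, self-contained sketch of the before/after shape, using hypothetical names rather than the Lucene classes touched below:

import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;

// Hypothetical writer illustrating the pattern this commit applies to PointWriter.
class SketchWriter implements Closeable {
  private final OutputStream out;
  private boolean finished;

  SketchWriter(OutputStream out) {
    this.out = out;
  }

  // Before the change, the footer was written inside close(): if that write
  // threw, out.close() was never reached and the file handle leaked.
  // After the change, the one-time "commit" work lives in finish() ...
  public void finish() throws IOException {
    if (finished) {
      throw new IllegalStateException("already finished");
    }
    finished = true;
    out.write(0); // stand-in for writing a checksum/footer; may throw
  }

  // ... and close() only releases the resource, so callers can invoke it from
  // a finally block or try-with-resources even when finish() was never reached.
  @Override
  public void close() throws IOException {
    out.close();
  }
}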


@@ -196,10 +196,14 @@ class SimpleTextPointWriter extends PointWriter {
SimpleTextUtil.writeNewline(out);
}
@Override
public void finish() throws IOException {
SimpleTextUtil.writeChecksum(dataOut, scratch);
}
@Override
public void close() throws IOException {
if (dataOut != null) {
SimpleTextUtil.writeChecksum(dataOut, scratch);
dataOut.close();
dataOut = null;


@@ -126,5 +126,9 @@ public abstract class PointWriter implements Closeable {
mergeOneField(mergeState, fieldInfo);
}
}
finish();
}
/** Called once at the end before close */
public abstract void finish() throws IOException;
}
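
The javadoc above pins down the new lifecycle: write or merge all fields, call finish() exactly once on success, then close(). A hedged sketch of that call order, reusing the hypothetical SketchWriter from the first sketch (the real factory method on the codec's point format is not part of this hunk):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

class SketchWriterUsage {
  public static void main(String[] args) throws IOException {
    // try-with-resources guarantees close() runs even if a write or finish()
    // throws, which is exactly what splitting finish() out of close() enables.
    try (SketchWriter writer = new SketchWriter(new ByteArrayOutputStream())) {
      // ... write or merge all point fields here ...
      writer.finish(); // one-time footer/index work, success path only
    }
  }
}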


@@ -47,7 +47,7 @@ public class Lucene60PointWriter extends PointWriter implements Closeable {
final SegmentWriteState writeState;
final int maxPointsInLeafNode;
final double maxMBSortInHeap;
private boolean closed;
private boolean finished;
/** Full constructor */
public Lucene60PointWriter(SegmentWriteState writeState, int maxPointsInLeafNode, double maxMBSortInHeap) throws IOException {
@@ -168,38 +168,45 @@ public class Lucene60PointWriter extends PointWriter implements Closeable {
mergeOneField(mergeState, fieldInfo);
}
}
}
}
}
finish();
}
@Override
public void finish() throws IOException {
if (finished) {
throw new IllegalStateException("already finished");
}
finished = true;
CodecUtil.writeFooter(dataOut);
String indexFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name,
writeState.segmentSuffix,
Lucene60PointFormat.INDEX_EXTENSION);
// Write index file
try (IndexOutput indexOut = writeState.directory.createOutput(indexFileName, writeState.context)) {
CodecUtil.writeIndexHeader(indexOut,
Lucene60PointFormat.META_CODEC_NAME,
Lucene60PointFormat.INDEX_VERSION_CURRENT,
writeState.segmentInfo.getId(),
writeState.segmentSuffix);
int count = indexFPs.size();
indexOut.writeVInt(count);
for(Map.Entry<String,Long> ent : indexFPs.entrySet()) {
FieldInfo fieldInfo = writeState.fieldInfos.fieldInfo(ent.getKey());
if (fieldInfo == null) {
throw new IllegalStateException("wrote field=\"" + ent.getKey() + "\" but that field doesn't exist in FieldInfos");
}
indexOut.writeVInt(fieldInfo.number);
indexOut.writeVLong(ent.getValue());
}
CodecUtil.writeFooter(indexOut);
}
}
@Override
public void close() throws IOException {
if (closed == false) {
CodecUtil.writeFooter(dataOut);
dataOut.close();
closed = true;
String indexFileName = IndexFileNames.segmentFileName(writeState.segmentInfo.name,
writeState.segmentSuffix,
Lucene60PointFormat.INDEX_EXTENSION);
// Write index file
try (IndexOutput indexOut = writeState.directory.createOutput(indexFileName, writeState.context)) {
CodecUtil.writeIndexHeader(indexOut,
Lucene60PointFormat.META_CODEC_NAME,
Lucene60PointFormat.INDEX_VERSION_CURRENT,
writeState.segmentInfo.getId(),
writeState.segmentSuffix);
int count = indexFPs.size();
indexOut.writeVInt(count);
for(Map.Entry<String,Long> ent : indexFPs.entrySet()) {
FieldInfo fieldInfo = writeState.fieldInfos.fieldInfo(ent.getKey());
if (fieldInfo == null) {
throw new IllegalStateException("wrote field=\"" + ent.getKey() + "\" but that field doesn't exist in FieldInfos");
}
indexOut.writeVInt(fieldInfo.number);
indexOut.writeVLong(ent.getValue());
}
CodecUtil.writeFooter(indexOut);
}
}
dataOut.close();
}
}


@@ -152,6 +152,9 @@ final class DefaultIndexingChain extends DocConsumer {
perField = perField.next;
}
}
if (pointWriter != null) {
pointWriter.finish();
}
success = true;
} finally {
if (success) {
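
The flush path above uses a success flag rather than try-with-resources: finish() runs on the normal path, and the finally block decides how the writers are closed. A generic sketch of that idiom (assuming the org.apache.lucene.util.IOUtils helpers; the actual body of this finally block is not shown in the hunk):

import java.io.Closeable;
import java.io.IOException;
import org.apache.lucene.util.IOUtils;

class SuccessFlagSketch {
  // pointWriter stands in for the per-segment writers closed at the end of a flush.
  static void flush(Closeable pointWriter) throws IOException {
    boolean success = false;
    try {
      // ... write all per-field data, then the one-time finish() work ...
      success = true;
    } finally {
      if (success) {
        IOUtils.close(pointWriter);                       // propagate any close failure
      } else {
        IOUtils.closeWhileHandlingException(pointWriter); // don't mask the original exception
      }
    }
  }
}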


@@ -32,6 +32,8 @@ import org.apache.lucene.codecs.memory.MemoryPostingsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -169,6 +171,8 @@ public class TestAddIndexes extends LuceneTestCase {
Document doc = new Document();
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
// Deletes one of the 10 added docs, leaving 9:
@@ -202,6 +206,8 @@
Document doc = new Document();
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
@@ -238,6 +244,8 @@
Document doc = new Document();
doc.add(newStringField("id", "" + (i % 10), Field.Store.NO));
doc.add(newTextField("content", "bbb " + i, Field.Store.NO));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.updateDocument(new Term("id", "" + (i%10)), doc);
}
@@ -510,6 +518,8 @@
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
}
@@ -518,6 +528,8 @@
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
doc.add(newTextField("content", "bbb", Field.Store.NO));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
}
@@ -1001,6 +1013,8 @@
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(newTextField("id", "" + (docStart + i), Field.Store.YES));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
writer.addDocument(doc);
}
}
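
The test changes add an IntPoint (and usually a NumericDocValuesField) to every indexed document so that the points writer is exercised on the flush and merge paths touched by this fix. For reference, a minimal standalone example of indexing and querying an IntPoint as it looks in the released Lucene 6.x API (RAMDirectory and StandardAnalyzer are used only to keep the sketch self-contained):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class IntPointSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
      for (int i = 0; i < 10; i++) {
        Document doc = new Document();
        doc.add(new IntPoint("doc", i));             // indexed as a one-dimensional point
        doc.add(new NumericDocValuesField("dv", i)); // matching doc-values column
        w.addDocument(doc);
      }
    }
    try (DirectoryReader r = DirectoryReader.open(dir)) {
      IndexSearcher s = new IndexSearcher(r);
      System.out.println(s.count(IntPoint.newExactQuery("doc", 3))); // prints 1
    }
  }
}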


@@ -80,6 +80,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
Document d = new Document();
d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
d.add(new TextField("contents", English.intToEnglish(i+10*count), Field.Store.NO));
d.add(new IntPoint("doc", i));
writer.updateDocument(new Term("id", Integer.toString(i)), d);
}
}


@@ -20,6 +20,8 @@ package org.apache.lucene.index;
import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
@@ -35,6 +37,8 @@ public class TestCodecHoldsOpenFiles extends LuceneTestCase {
for(int i=0;i<numDocs;i++) {
Document doc = new Document();
doc.add(newField("foo", "bar", TextField.TYPE_NOT_STORED));
doc.add(new IntPoint("doc", i));
doc.add(new NumericDocValuesField("dv", i));
w.addDocument(doc);
}


@@ -34,6 +34,7 @@ import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -125,6 +126,7 @@ public class TestIndexWriterExceptions2 extends LuceneTestCase {
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setStoreTermVectors(true);
doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
doc.add(new IntPoint("point", random().nextInt()));
if (random().nextInt(10) > 0) {
// single doc


@@ -24,6 +24,7 @@ import org.apache.lucene.codecs.LiveDocsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
@@ -572,6 +573,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("content", "aaa", Field.Store.NO));
doc.add(new NumericDocValuesField("numericdv", 1));
doc.add(new IntPoint("point", 1));
writer.addDocument(doc);
}
@@ -580,6 +582,7 @@
doc.add(newTextField("content", "aaa " + index, Field.Store.NO));
doc.add(newTextField("id", "" + index, Field.Store.NO));
doc.add(new NumericDocValuesField("numericdv", 1));
doc.add(new IntPoint("point", 1));
writer.addDocument(doc);
}
}


@@ -31,6 +31,7 @@ import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -124,6 +125,7 @@ public class TestIndexWriterOnVMError extends LuceneTestCase {
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setStoreTermVectors(true);
doc.add(newField("text_vectors", TestUtil.randomAnalysisString(random(), 6, true), ft));
doc.add(new IntPoint("point", random().nextInt()));
if (random().nextInt(10) > 0) {
// single doc


@@ -140,6 +140,11 @@ public final class AssertingPointFormat extends PointFormat {
in.merge(mergeState);
}
@Override
public void finish() throws IOException {
in.finish();
}
@Override
public void close() throws IOException {
in.close();