LUCENE-6082: remove abort() from codec apis

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1642558 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2014-11-30 15:58:29 +00:00
parent bdc977613d
commit effff3b1c7
17 changed files with 13 additions and 88 deletions

@ -227,6 +227,8 @@ API Changes
* LUCENE-6068: LeafReader.fields() never returns null. (Robert Muir)
* LUCENE-6082: Remove abort() from codec apis. (Robert Muir)
Bug Fixes
* LUCENE-5650: Enforce read-only access to any path outside the temporary
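
The pattern that replaces abort() throughout this commit shows up in the hunks below: writers guard their constructors with a success flag and, on failure, simply close whatever they opened via IOUtils.closeWhileHandlingException; deleting partially written files is left to the indexing machinery rather than the codec. A minimal sketch of that pattern (the class and file names are illustrative, not code from this commit):

import java.io.Closeable;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;

class HypotheticalFieldsWriter implements Closeable {
  private final IndexOutput out;

  HypotheticalFieldsWriter(Directory dir, IOContext context) throws IOException {
    boolean success = false;
    IndexOutput o = null;
    try {
      o = dir.createOutput("_0_hypothetical.dat", context);
      success = true;
    } finally {
      if (!success) {
        // replaces the old abort(): close what was opened, do not delete files here
        IOUtils.closeWhileHandlingException(o);
      }
    }
    out = o;
  }

  @Override
  public void close() throws IOException {
    out.close();
  }
}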

@ -205,7 +205,6 @@ public class SimpleTextSegmentInfoFormat extends SegmentInfoFormat {
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(output);
IOUtils.deleteFilesIgnoringExceptions(dir, segFileName);
} else {
output.close();
}

@ -70,7 +70,7 @@ public class SimpleTextStoredFieldsWriter extends StoredFieldsWriter {
success = true;
} finally {
if (!success) {
abort();
IOUtils.closeWhileHandlingException(this);
}
}
}
@ -151,14 +151,6 @@ public class SimpleTextStoredFieldsWriter extends StoredFieldsWriter {
}
}
@Override
public void abort() {
try {
close();
} catch (Throwable ignored) {}
IOUtils.deleteFilesIgnoringExceptions(directory, IndexFileNames.segmentFileName(segment, "", FIELDS_EXTENSION));
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
if (numDocsWritten != numDocs) {

@ -74,7 +74,7 @@ public class SimpleTextTermVectorsWriter extends TermVectorsWriter {
success = true;
} finally {
if (!success) {
abort();
IOUtils.closeWhileHandlingException(this);
}
}
}
@ -163,14 +163,6 @@ public class SimpleTextTermVectorsWriter extends TermVectorsWriter {
}
}
@Override
public void abort() {
try {
close();
} catch (Throwable ignored) {}
IOUtils.deleteFilesIgnoringExceptions(directory, IndexFileNames.segmentFileName(segment, "", VECTORS_EXTENSION));
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
if (numDocsWritten != numDocs) {

@ -48,7 +48,8 @@ public abstract class SegmentInfoFormat {
public abstract SegmentInfo read(Directory directory, String segmentName, byte segmentID[], IOContext context) throws IOException;
/**
* Write {@link SegmentInfo} data.
* The codec must add its SegmentInfo filename(s) to {@code info} before doing i/o.
* @throws IOException If an I/O error occurs
*/
public abstract void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException;
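
The sentence added to this javadoc is the contract that makes the removal possible: a codec registers its filename(s) on the SegmentInfo before doing any i/o, so the file-tracking layer can clean up a half-written segment info file, and the explicit delete in Lucene50SegmentInfoFormat further down becomes unnecessary. A sketch of a write() implementation following that contract (the codec class and the ".hsi" extension are made up):

import java.io.IOException;

import org.apache.lucene.codecs.SegmentInfoFormat;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

class HypotheticalSegmentInfoFormat extends SegmentInfoFormat {
  @Override
  public void write(Directory dir, SegmentInfo info, IOContext ioContext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(info.name, "", "hsi");
    info.addFile(fileName);  // register the file before any i/o happens
    try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
      // ... write header, diagnostics, file set, footer ...
    }
  }

  @Override
  public SegmentInfo read(Directory dir, String segmentName, byte[] segmentID, IOContext context) throws IOException {
    throw new UnsupportedOperationException("sketch only");
  }
}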

@ -60,10 +60,6 @@ public abstract class StoredFieldsWriter implements Closeable {
/** Writes a single stored field. */
public abstract void writeField(FieldInfo info, StorableField field) throws IOException;
/** Aborts writing entirely, implementation should remove
* any partially-written files, etc. */
public abstract void abort();
/** Called before {@link #close()}, passing in the number
* of documents that were written. Note that this is
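
With abort() gone, failure cleanup is the caller's job; finish() remains the place where an implementation validates that the reported document count matches what it actually wrote, as the numDocsWritten checks in the SimpleText writers above suggest. A small sketch of what a StoredFieldsWriter implementation typically checks here (the field name and message are illustrative):

import java.io.IOException;

import org.apache.lucene.index.FieldInfos;

class DocCountCheckSketch {
  private int numDocsWritten;  // incremented for every document this writer sees

  void finish(FieldInfos fis, int numDocs) throws IOException {
    if (numDocsWritten != numDocs) {
      throw new RuntimeException("expected " + numDocs + " documents, but wrote " + numDocsWritten);
    }
    // flush any buffered data here; close() is called afterwards
  }
}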

@ -93,10 +93,6 @@ public abstract class TermVectorsWriter implements Closeable {
/** Adds a term position and offsets */
public abstract void addPosition(int position, int startOffset, int endOffset, BytesRef payload) throws IOException;
/** Aborts writing entirely, implementation should remove
* any partially-written files, etc. */
public abstract void abort();
/** Called before {@link #close()}, passing in the number
* of documents that were written. Note that this is

@ -128,8 +128,7 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(indexStream);
abort();
IOUtils.closeWhileHandlingException(fieldsStream, indexStream, indexWriter);
}
}
}
@ -303,14 +302,6 @@ public final class CompressingStoredFieldsWriter extends StoredFieldsWriter {
}
}
@Override
public void abort() {
IOUtils.closeWhileHandlingException(this);
IOUtils.deleteFilesIgnoringExceptions(directory,
IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION),
IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_INDEX_EXTENSION));
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
if (numBufferedDocs > 0) {

@ -248,8 +248,7 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
success = true;
} finally {
if (!success) {
IOUtils.closeWhileHandlingException(indexStream);
abort();
IOUtils.closeWhileHandlingException(vectorsStream, indexStream, indexWriter);
}
}
}
@ -264,14 +263,6 @@ public final class CompressingTermVectorsWriter extends TermVectorsWriter {
}
}
@Override
public void abort() {
IOUtils.closeWhileHandlingException(this);
IOUtils.deleteFilesIgnoringExceptions(directory,
IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_EXTENSION),
IndexFileNames.segmentFileName(segment, segmentSuffix, VECTORS_INDEX_EXTENSION));
}
@Override
public void startDocument(int numVectorFields) throws IOException {
curDoc = addDocData(numVectorFields);

@ -118,7 +118,6 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene50SegmentInfoFormat.SI_EXTENSION);
si.addFile(fileName);
boolean success = false;
try (IndexOutput output = dir.createOutput(fileName, ioContext)) {
CodecUtil.writeIndexHeader(output,
Lucene50SegmentInfoFormat.CODEC_NAME,
@ -146,12 +145,6 @@ public class Lucene50SegmentInfoFormat extends SegmentInfoFormat {
}
output.writeStringSet(files);
CodecUtil.writeFooter(output);
success = true;
} finally {
if (!success) {
// TODO: are we doing this outside of the tracking wrapper? why must SIWriter cleanup like this?
IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName);
}
}
}
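
The removed TODO hints at why this cleanup can go away: segment files are created through a tracking directory wrapper, so after a failure the indexing machinery already knows which files exist and can delete them centrally. A rough sketch of that idea using TrackingDirectoryWrapper (the real wiring inside IndexWriter and DocumentsWriterPerThread is more involved, and the file name below is made up):

import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.IOUtils;

class TrackingCleanupSketch {
  void writeSegmentFiles(Directory rawDir) throws IOException {
    TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(rawDir);
    try {
      // the codec writes all of its files through trackingDir, e.g.:
      trackingDir.createOutput("_0_hypothetical.si", IOContext.DEFAULT).close();
      // ... further writes that might fail half way ...
    } catch (IOException e) {
      // central cleanup: delete whatever was created so far, then rethrow
      IOUtils.deleteFilesIgnoringExceptions(trackingDir,
          trackingDir.getCreatedFiles().toArray(new String[0]));
      throw e;
    }
  }
}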

@ -217,11 +217,7 @@ final class DefaultIndexingChain extends DocConsumer {
@Override
public void abort() {
try {
// E.g. close any open files in the stored fields writer:
storedFieldsWriter.abort();
} catch (Throwable t) {
}
IOUtils.closeWhileHandlingException(storedFieldsWriter);
try {
// E.g. close any open files in the term vectors writer:
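
abort() on the indexing chain now just closes its writers. IOUtils.closeWhileHandlingException closes each argument and suppresses anything thrown, which matches the hand-written "try { ... } catch (Throwable t) {}" blocks being deleted here. A rough sketch of that helper's behaviour (the real utility lives in org.apache.lucene.util.IOUtils):

import java.io.Closeable;

final class CloseUtilSketch {
  static void closeWhileHandlingException(Closeable... objects) {
    for (Closeable object : objects) {
      try {
        if (object != null) {
          object.close();
        }
      } catch (Throwable t) {
        // suppressed: the abort path must not mask the original failure
      }
    }
  }
}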

@ -126,7 +126,7 @@ final class TermVectorsConsumer extends TermsHash {
super.abort();
} finally {
if (writer != null) {
writer.abort();
IOUtils.closeWhileHandlingException(writer);
writer = null;
}

@ -84,6 +84,8 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
iw.commit();
}
finally {
// next event will cause IW to delete the old files: we use prepareCommit just as an example
iw.prepareCommit();
int counter = 0;
for (String fileName : dir.listAll()) {
if (fileName.endsWith(".fdt") || fileName.endsWith(".fdx")) {
@ -92,7 +94,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
}
// Only one .fdt and one .fdx file must have been found
assertEquals(2, counter);
iw.close();
iw.rollback();
dir.close();
}
}
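
The updated test drives cleanup through normal IndexWriter operations instead of codec-level abort(): prepareCommit() is used only as an event that makes IndexWriter delete the old, now-unreferenced files, and rollback() closes the writer while discarding everything that was never committed. A small sketch of the rollback behaviour being relied on (the setup is illustrative, not the test's exact code):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

class RollbackSketch {
  void demo() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    iw.addDocument(new Document());
    iw.rollback();                                    // closes iw and drops the uncommitted document
    assert DirectoryReader.indexExists(dir) == false; // nothing was ever committed
    dir.close();
  }
}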

@ -142,11 +142,6 @@ public class AssertingStoredFieldsFormat extends StoredFieldsFormat {
in.writeField(info, field);
}
@Override
public void abort() {
in.abort();
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
assert docStatus == (numDocs > 0 ? Status.FINISHED : Status.UNDEFINED);

@ -194,11 +194,6 @@ public class AssertingTermVectorsFormat extends TermVectorsFormat {
--positionCount;
}
@Override
public void abort() {
in.abort();
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
assert docCount == numDocs;

@ -63,14 +63,6 @@ class CrankyStoredFieldsFormat extends StoredFieldsFormat {
this.random = random;
}
@Override
public void abort() {
delegate.abort();
if (random.nextInt(100) == 0) {
throw new RuntimeException(new IOException("Fake IOException from StoredFieldsWriter.abort()"));
}
}
@Override
public void finish(FieldInfos fis, int numDocs) throws IOException {
if (random.nextInt(100) == 0) {

@ -63,14 +63,6 @@ class CrankyTermVectorsFormat extends TermVectorsFormat {
this.random = random;
}
@Override
public void abort() {
delegate.abort();
if (random.nextInt(100) == 0) {
throw new RuntimeException(new IOException("Fake IOException from TermVectorsWriter.abort()"));
}
}
@Override
public int merge(MergeState mergeState) throws IOException {
if (random.nextInt(100) == 0) {
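
The Cranky test formats delegate to a real writer and randomly throw fake IOExceptions from its methods to exercise error handling; with abort() removed from the writer APIs, their fake-failure hooks for abort() disappear as well. A generic sketch of that wrapper idea (the class name is illustrative, not one of the test framework's actual classes):

import java.io.Closeable;
import java.io.IOException;
import java.util.Random;

class CrankyCloseableSketch implements Closeable {
  private final Closeable delegate;
  private final Random random;

  CrankyCloseableSketch(Closeable delegate, Random random) {
    this.delegate = delegate;
    this.random = random;
  }

  @Override
  public void close() throws IOException {
    if (random.nextInt(100) == 0) {
      throw new IOException("Fake IOException from close()");
    }
    delegate.close();
  }
}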