mirror of https://github.com/apache/lucene.git
LUCENE-4456: more fixes that are only exposed by additional random sleeps
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1394314 13f79535-47bb-0310-9956-ffa450edef68
parent 532f82806f
commit 4a41604dca
@@ -492,6 +492,7 @@ final class DocumentsWriter {
     throws IOException {
     assert newSegment != null;
     assert newSegment.segmentInfo != null;
+    //System.out.println("FLUSH: " + newSegment.segmentInfo.info.name);
     final SegmentInfoPerCommit segInfo = indexWriter.prepareFlushedSegment(newSegment);
     final BufferedDeletes deletes = newSegment.segmentDeletes;
     if (infoStream.isEnabled("DW")) {
@@ -3211,14 +3211,17 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       deleter.deleteNewFiles(merge.info.files());
     }

+    // Must note the change to segmentInfos so any commits
+    // in-flight don't lose it (IFD will incRef/protect the
+    // new files we created):
+    checkpoint();
+
     // Must close before checkpoint, otherwise IFD won't be
     // able to delete the held-open files from the merge
     // readers:
     closeMergeReaders(merge, false);

-    // Must note the change to segmentInfos so any commits
-    // in-flight don't lose it:
-    checkpoint();
+    deleter.deletePendingFiles();

     if (infoStream.isEnabled("IW")) {
       infoStream.message("IW", "after commitMerge: " + segString());
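Note on the reordering above: per the added comments, checkpoint() now runs before the merge readers are closed, so the IndexFileDeleter ref-counts (protects) the newly created files while any in-flight commit is still looking at them, and the explicit deleter.deletePendingFiles() afterwards removes files that could not be deleted while the readers still held them open. A very rough, generic sketch of ref-count-protected deletion, with hypothetical names (Lucene's actual IndexFileDeleter is considerably more involved):

// Hypothetical sketch, not Lucene's IndexFileDeleter: files stay protected as
// long as their reference count is above zero; only unreferenced files become
// candidates for a later deletePendingFiles() pass.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class RefCountingDeleterSketch {
  private final Map<String, Integer> refCounts = new HashMap<>();
  private final List<String> pendingDeletes = new ArrayList<>();

  synchronized void incRef(String file) {
    refCounts.merge(file, 1, Integer::sum);
  }

  synchronized void decRef(String file) {
    int count = refCounts.merge(file, -1, Integer::sum);
    if (count <= 0) {
      refCounts.remove(file);
      pendingDeletes.add(file);   // removed later by deletePendingFiles()
    }
  }

  synchronized List<String> deletePendingFiles() {
    List<String> toDelete = new ArrayList<>(pendingDeletes);
    pendingDeletes.clear();
    return toDelete;              // caller actually deletes them from the Directory
  }
}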
@@ -3282,6 +3285,9 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
     try {
       try {
         mergeInit(merge);
+        //if (merge.info != null) {
+        //System.out.println("MERGE: " + merge.info.info.name);
+        //}

         if (infoStream.isEnabled("IW")) {
           infoStream.message("IW", "now merge\n merge=" + segString(merge.segments) + "\n index=" + segString());
@@ -3672,7 +3678,18 @@ public class IndexWriter implements Closeable, TwoPhaseCommit {
       merge.checkAborted(directory);

       // This is where all the work happens:
-      MergeState mergeState = merger.merge();
+      MergeState mergeState;
+      boolean success3 = false;
+      try {
+        mergeState = merger.merge();
+        success3 = true;
+      } finally {
+        if (!success3) {
+          synchronized(this) {
+            deleter.refresh(merge.info.info.name);
+          }
+        }
+      }
       assert mergeState.segmentInfo == merge.info.info;
       merge.info.info.setFiles(new HashSet<String>(dirWrapper.getCreatedFiles()));

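The change above wraps merger.merge() in the success-flag idiom: if the merge throws, the finally block sees success3 still false and asks the deleter to refresh (drop) the partially written files for that segment instead of leaving them behind. A minimal, self-contained sketch of the same idiom, assuming nothing Lucene-specific (names here are illustrative only):

// Sketch of the success-flag cleanup idiom; names are hypothetical.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class PartialOutputExample {
  /** Writes data to 'target'; deletes the partial file if anything goes wrong. */
  static void writeOrCleanUp(Path target, byte[] data) throws IOException {
    boolean success = false;
    try {
      Files.write(target, data);   // may fail part-way through
      success = true;
    } finally {
      if (!success) {
        // Best-effort cleanup; must not mask the original exception.
        try {
          Files.deleteIfExists(target);
        } catch (Throwable t) {
          // suppressed so the caller still sees the original failure
        }
      }
    }
  }
}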
@@ -733,25 +733,19 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfoPerCommit> {

  final void rollbackCommit(Directory dir) {
    if (pendingSegnOutput != null) {
-      try {
-        pendingSegnOutput.close();
-      } catch (Throwable t) {
-        // Suppress so we keep throwing the original exception
-        // in our caller
-      }
+      // Suppress so we keep throwing the original exception
+      // in our caller
+      IOUtils.closeWhileHandlingException(pendingSegnOutput);
+      pendingSegnOutput = null;

      // Must carefully compute fileName from "generation"
      // since lastGeneration isn't incremented:
-      try {
-        final String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
-                                                                             "",
-                                                                             generation);
-        dir.deleteFile(segmentFileName);
-      } catch (Throwable t) {
-        // Suppress so we keep throwing the original exception
-        // in our caller
-      }
-      pendingSegnOutput = null;
+      final String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
+                                                                           "",
+                                                                           generation);
+      // Suppress so we keep throwing the original exception
+      // in our caller
+      IOUtils.deleteFilesIgnoringExceptions(dir, segmentFileName);
    }
  }

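rollbackCommit() now leans on the IOUtils helpers instead of hand-rolled try/catch blocks: both the close and the delete are done while suppressing any secondary exception, so the caller keeps propagating the original failure. A minimal sketch of what such a "close quietly" helper does, assuming only java.io (the real org.apache.lucene.util.IOUtils methods take varargs and are more general):

// Minimal sketch of a "close but don't throw" helper.
import java.io.Closeable;

final class QuietIO {
  /** Closes 'c', swallowing any exception so an in-flight exception keeps propagating. */
  static void closeWhileHandlingException(Closeable c) {
    if (c == null) {
      return;
    }
    try {
      c.close();
    } catch (Throwable t) {
      // suppressed: the caller is typically already unwinding with the
      // original exception and must not lose it to a secondary failure
    }
  }
}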
@@ -813,8 +807,19 @@ public final class SegmentInfos implements Cloneable, Iterable<SegmentInfoPerCommit> {
        IOUtils.closeWhileHandlingException(pendingSegnOutput);
        rollbackCommit(dir);
      } else {
-        pendingSegnOutput.close();
-        pendingSegnOutput = null;
+        success = false;
+        try {
+          pendingSegnOutput.close();
+          success = true;
+        } finally {
+          if (!success) {
+            final String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
+                                                                                  "",
+                                                                                  generation);
+            IOUtils.deleteFilesIgnoringExceptions(dir, segmentFileName);
+          }
+          pendingSegnOutput = null;
+        }
      }
    }

@@ -340,7 +340,7 @@ final class StandardDirectoryReader extends DirectoryReader {
      try {
        r.decRef();
      } catch (Throwable t) {
-        if (t == null) firstExc = t;
+        if (firstExc == null) firstExc = t;
      }
    }

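The fix above is to the usual "remember the first exception, keep closing the rest" pattern; the old test t == null could never be true inside the catch block, so firstExc was never recorded. A small generic sketch of the pattern (illustrative names, not Lucene API):

// Sketch of the "capture the first exception, keep going" pattern fixed above.
import java.io.Closeable;
import java.io.IOException;
import java.util.List;

final class CloseAll {
  static void closeAll(List<? extends Closeable> resources) throws IOException {
    Throwable firstExc = null;
    for (Closeable r : resources) {
      try {
        r.close();
      } catch (Throwable t) {
        if (firstExc == null) {
          firstExc = t;        // remember only the first failure
        }
      }
    }
    if (firstExc != null) {
      if (firstExc instanceof IOException) {
        throw (IOException) firstExc;
      }
      throw new RuntimeException(firstExc);
    }
  }
}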
@@ -129,6 +129,8 @@ final class CompoundFileWriter implements Closeable{
    }
    IOException priorException = null;
    IndexOutput entryTableOut = null;
+    // TODO this code should clean up after itself
+    // (remove partial .cfs/.cfe)
    try {
      if (!pendingEntries.isEmpty() || outputTaken.get()) {
        throw new IllegalStateException("CFS has pending open files");
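The new TODO records that a failure in close() can currently leave partial .cfs/.cfe files behind. One way such cleanup could look, purely as a sketch (this is not what the commit implements), is the same delete-ignoring-exceptions idiom used elsewhere in this change:

// Hypothetical sketch only: removing partial compound-file outputs on failure,
// mirroring the IOUtils.deleteFilesIgnoringExceptions idiom used above.
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

final class CompoundFileCleanupSketch {
  static void deleteIgnoringExceptions(String dir, String... names) {
    for (String name : names) {
      try {
        Path p = Paths.get(dir, name);
        Files.deleteIfExists(p);
      } catch (Throwable t) {
        // ignored: cleanup must not replace the original exception
      }
    }
  }
}

// e.g. on failure: deleteIgnoringExceptions(indexDir, "_1.cfs", "_1.cfe");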
@@ -137,8 +139,6 @@ final class CompoundFileWriter implements Closeable{
      // open the compound stream
      getOutput();
      assert dataOut != null;
-      long finalLength = dataOut.getFilePointer();
-      assert assertFileLength(finalLength, dataOut);
    } catch (IOException e) {
      priorException = e;
    } finally {
@@ -154,14 +154,6 @@ final class CompoundFileWriter implements Closeable{
    }
  }
-
-  private static boolean assertFileLength(long expected, IndexOutput out)
-      throws IOException {
-    out.flush();
-    assert expected == out.length() : "expected: " + expected + " was "
-        + out.length();
-    return true;
-  }

  private final void ensureOpen() {
    if (closed) {
      throw new AlreadyClosedException("CFS Directory is already closed");
@@ -1039,7 +1039,7 @@ public class TestIndexWriter extends LuceneTestCase {
            allowInterrupt = true;
          }
        } catch (ThreadInterruptedException re) {
-          if (VERBOSE) {
+          if (true || VERBOSE) {
            System.out.println("TEST: got interrupt");
            re.printStackTrace(System.out);
          }