LUCENE-2486: incRef the doc store files so we can load them when opening the reader for warming

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@951521 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2010-06-04 18:42:15 +00:00
parent 39edb867b2
commit 27b2e1054c
5 changed files with 41 additions and 21 deletions

View File

@ -472,6 +472,10 @@ Bug fixes
that warming is free to do whatever it needs to. (Earwin Burrfoot that warming is free to do whatever it needs to. (Earwin Burrfoot
via Mike McCandless) via Mike McCandless)
* LUCENE-2486: Fixed intermittent FileNotFoundException on doc store
files when a mergedSegmentWarmer is set on IndexWriter. (Mike
McCandless)
New features New features
* LUCENE-2128: Parallelized fetching document frequencies during weight * LUCENE-2128: Parallelized fetching document frequencies during weight

View File

@ -465,7 +465,7 @@ final class IndexFileDeleter {
} }
} }
void incRef(List<String> files) throws IOException { void incRef(Collection<String> files) throws IOException {
for(final String file : files) { for(final String file : files) {
incRef(file); incRef(file);
} }

View File

@ -3537,25 +3537,7 @@ public class IndexWriter implements Closeable {
commitMergedDeletes(merge, mergedReader); commitMergedDeletes(merge, mergedReader);
docWriter.remapDeletes(segmentInfos, merger.getDocMaps(), merger.getDelCounts(), merge, mergedDocCount); docWriter.remapDeletes(segmentInfos, merger.getDocMaps(), merger.getDelCounts(), merge, mergedDocCount);
// Simple optimization: if the doc store we are using setMergeDocStoreIsCompoundFile(merge);
// has been closed and is in now compound format (but
// wasn't when we started), then we will switch to the
// compound format as well:
final String mergeDocStoreSegment = merge.info.getDocStoreSegment();
if (mergeDocStoreSegment != null && !merge.info.getDocStoreIsCompoundFile()) {
final int size = segmentInfos.size();
for(int i=0;i<size;i++) {
final SegmentInfo info = segmentInfos.info(i);
final String docStoreSegment = info.getDocStoreSegment();
if (docStoreSegment != null &&
docStoreSegment.equals(mergeDocStoreSegment) &&
info.getDocStoreIsCompoundFile()) {
merge.info.setDocStoreIsCompoundFile(true);
break;
}
}
}
merge.info.setHasProx(merger.hasProx()); merge.info.setHasProx(merger.hasProx());
segmentInfos.subList(start, start + merge.segments.size()).clear(); segmentInfos.subList(start, start + merge.segments.size()).clear();
@ -3903,6 +3885,11 @@ public class IndexWriter implements Closeable {
if (merge.increfDone) if (merge.increfDone)
decrefMergeSegments(merge); decrefMergeSegments(merge);
if (merge.mergeFiles != null) {
deleter.decRef(merge.mergeFiles);
merge.mergeFiles = null;
}
// It's possible we are called twice, eg if there was an // It's possible we are called twice, eg if there was an
// exception inside mergeInit // exception inside mergeInit
if (merge.registerDone) { if (merge.registerDone) {
@ -3917,6 +3904,23 @@ public class IndexWriter implements Closeable {
runningMerges.remove(merge); runningMerges.remove(merge);
} }
/**
 * Reconciles the merged segment's doc-store compound-file flag with the
 * rest of the index: if any segment currently in {@code segmentInfos}
 * shares this merge's doc store segment and has already switched that doc
 * store to compound-file format, flag the merged segment's doc store as
 * compound too, so its doc store files are resolved correctly.  Does
 * nothing when the merge has no doc store or is already marked compound.
 */
private synchronized void setMergeDocStoreIsCompoundFile(MergePolicy.OneMerge merge) {
  final String docStoreName = merge.info.getDocStoreSegment();
  if (docStoreName == null || merge.info.getDocStoreIsCompoundFile()) {
    // No shared doc store, or flag already set: nothing to reconcile.
    return;
  }
  final int numSegments = segmentInfos.size();
  for (int idx = 0; idx < numSegments; idx++) {
    final SegmentInfo si = segmentInfos.info(idx);
    // equals() on the non-null merge doc store name is null-safe against
    // segments that have no doc store of their own.
    if (docStoreName.equals(si.getDocStoreSegment()) &&
        si.getDocStoreIsCompoundFile()) {
      // First segment that already went compound decides the flag.
      merge.info.setDocStoreIsCompoundFile(true);
      break;
    }
  }
}
/** Does the actual (time-consuming) work of the merge, /** Does the actual (time-consuming) work of the merge,
* but without holding synchronized lock on IndexWriter * but without holding synchronized lock on IndexWriter
* instance */ * instance */
@ -4037,6 +4041,17 @@ public class IndexWriter implements Closeable {
final int termsIndexDivisor; final int termsIndexDivisor;
final boolean loadDocStores; final boolean loadDocStores;
synchronized(this) {
// If the doc store we are using has been closed and
// is now in compound format (but wasn't when we
// started), then we will switch to the compound
// format as well:
setMergeDocStoreIsCompoundFile(merge);
assert merge.mergeFiles == null;
merge.mergeFiles = merge.info.files();
deleter.incRef(merge.mergeFiles);
}
if (poolReaders && mergedSegmentWarmer != null) { if (poolReaders && mergedSegmentWarmer != null) {
// Load terms index & doc stores so the segment // Load terms index & doc stores so the segment
// warmer can run searches, load documents/term // warmer can run searches, load documents/term

View File

@ -76,6 +76,7 @@ public abstract class MergePolicy implements java.io.Closeable {
int maxNumSegmentsOptimize; // used by IndexWriter int maxNumSegmentsOptimize; // used by IndexWriter
SegmentReader[] readers; // used by IndexWriter SegmentReader[] readers; // used by IndexWriter
SegmentReader[] readersClone; // used by IndexWriter SegmentReader[] readersClone; // used by IndexWriter
List<String> mergeFiles; // used by IndexWriter
final SegmentInfos segments; final SegmentInfos segments;
final boolean useCompoundFile; final boolean useCompoundFile;
boolean aborted; boolean aborted;

View File

@ -542,7 +542,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2); ((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(2);
for (int i = 0; i < 10; i++) { for (int i = 0; i < 100*_TestUtil.getRandomMultiplier(); i++) {
writer.addDocument(createDocument(i, "test", 4)); writer.addDocument(createDocument(i, "test", 4));
} }
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync(); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();