fix test/test-framework/real bug causing OOME in TestPostingsFormat

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1380808 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2012-09-04 18:52:19 +00:00
parent 635699a563
commit ea8ecb15a4
5 changed files with 41 additions and 9 deletions

View File

@@ -138,6 +138,10 @@ Bug Fixes

 * LUCENE-4333: Fixed NPE in TermGroupFacetCollector when faceting on mv fields.
   (Jesse MacVicar, Martijn van Groningen)

+* NRTCachingDirectory was always caching a newly flushed segment in
+  RAM, instead of checking the estimated size of the segment
+  to decide whether to cache it.  (Mike McCandless)
+
 Optimizations

 * LUCENE-4322: Decrease lucene-core JAR size. The core JAR size had increased a

View File

@@ -267,9 +267,16 @@ public class NRTCachingDirectory extends Directory {

   /** Subclass can override this to customize logic; return
    *  true if this file should be written to the RAMDirectory. */
   protected boolean doCacheWrite(String name, IOContext context) {
-    final MergeInfo merge = context.mergeInfo;
     //System.out.println(Thread.currentThread().getName() + ": CACHE check merge=" + merge + " size=" + (merge==null ? 0 : merge.estimatedMergeBytes));
-    return !name.equals(IndexFileNames.SEGMENTS_GEN) && (merge == null || merge.estimatedMergeBytes <= maxMergeSizeBytes) && cache.sizeInBytes() <= maxCachedBytes;
+    long bytes = 0;
+    if (context.mergeInfo != null) {
+      bytes = context.mergeInfo.estimatedMergeBytes;
+    } else if (context.flushInfo != null) {
+      bytes = context.flushInfo.estimatedSegmentSize;
+    }
+    return !name.equals(IndexFileNames.SEGMENTS_GEN) && (bytes <= maxMergeSizeBytes) && (bytes + cache.sizeInBytes()) <= maxCachedBytes;
   }

   private final Object uncacheLock = new Object();

View File

@@ -38,12 +38,14 @@ import org.apache.lucene.codecs.TermStats;
 import org.apache.lucene.codecs.TermsConsumer;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FlushInfo;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.RamUsageEstimator;
 import org.apache.lucene.util._TestUtil;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -379,10 +381,13 @@ public class TestPostingsFormat extends LuceneTestCase {

     FieldInfos newFieldInfos = new FieldInfos(newFieldInfoArray);

+    // Estimate that flushed segment size will be 25% of
+    // what we use in RAM:
+    long bytes = RamUsageEstimator.sizeOf(fields)/4;
+
     SegmentWriteState writeState = new SegmentWriteState(null, dir,
                                                         segmentInfo, newFieldInfos,
-                                                        32, null, IOContext.DEFAULT);
+                                                        32, null, new IOContext(new FlushInfo(maxDocID, bytes)));

     FieldsConsumer fieldsConsumer = Codec.getDefault().postingsFormat().fieldsConsumer(writeState);

     for(Map.Entry<String,Map<BytesRef,List<Posting>>> fieldEnt : fields.entrySet()) {

View File

@@ -443,7 +443,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
     }

     //System.out.println(Thread.currentThread().getName() + ": MDW: create " + name);
-    IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)), name);
+    IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name, LuceneTestCase.newIOContext(randomState, context)), name);
     addFileHandle(io, name, Handle.Output);
     openFilesForWrite.add(name);
@@ -497,7 +497,7 @@ public class MockDirectoryWrapper extends BaseDirectoryWrapper {
       throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false);
     }
-    IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name, LuceneTestCase.newIOContext(randomState)));
+    IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name, LuceneTestCase.newIOContext(randomState, context)));
     addFileHandle(ii, name, Handle.Input);
     return ii;
   }

View File

@@ -1130,8 +1130,23 @@ public abstract class LuceneTestCase extends Assert {

   /** TODO: javadoc */
   public static IOContext newIOContext(Random random) {
+    return newIOContext(random, IOContext.DEFAULT);
+  }
+
+  /** TODO: javadoc */
+  public static IOContext newIOContext(Random random, IOContext oldContext) {
     final int randomNumDocs = random.nextInt(4192);
     final int size = random.nextInt(512) * randomNumDocs;
+    if (oldContext.flushInfo != null) {
+      // Always return at least the estimatedSegmentSize of
+      // the incoming IOContext:
+      return new IOContext(new FlushInfo(randomNumDocs, Math.max(oldContext.flushInfo.estimatedSegmentSize, size)));
+    } else if (oldContext.mergeInfo != null) {
+      // Always return at least the estimatedMergeBytes of
+      // the incoming IOContext:
+      return new IOContext(new MergeInfo(randomNumDocs, Math.max(oldContext.mergeInfo.estimatedMergeBytes, size), random.nextBoolean(), _TestUtil.nextInt(random, 1, 100)));
+    } else {
+      // Make a totally random IOContext:
     final IOContext context;
     switch (random.nextInt(5)) {
       case 0:
@@ -1154,6 +1169,7 @@ public abstract class LuceneTestCase extends Assert {
     }
     return context;
   }
+  }
/** /**
* Create a new searcher over the reader. This searcher might randomly use * Create a new searcher over the reader. This searcher might randomly use