don't use MEMORY_DOC or MEMORY_CHUNK with CompressingStoredFields in this test

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1402471 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2012-10-26 11:46:39 +00:00
parent 1be0d9bb68
commit 400d1e0290
1 changed file with 23 additions and 6 deletions


@@ -20,6 +20,10 @@ package org.apache.lucene.index;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.codecs.compressing.CompressingCodec;
+import org.apache.lucene.codecs.compressing.CompressingStoredFieldsFormat;
+import org.apache.lucene.codecs.compressing.CompressingStoredFieldsIndex;
+import org.apache.lucene.codecs.compressing.CompressionMode;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
@@ -27,12 +31,13 @@ import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.apache.lucene.store.BaseDirectoryWrapper;
 import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.TimeUnits;
 import org.apache.lucene.util._TestUtil;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 /**
  * Test indexes ~82M docs with 26 terms each, so you get > Integer.MAX_VALUE terms/docs pairs
@@ -48,14 +53,26 @@ public class Test2BPostings extends LuceneTestCase {
     if (dir instanceof MockDirectoryWrapper) {
       ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
     }
-    IndexWriter w = new IndexWriter(dir,
-        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
+    IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
         .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
         .setRAMBufferSizeMB(256.0)
         .setMergeScheduler(new ConcurrentMergeScheduler())
         .setMergePolicy(newLogMergePolicy(false, 10))
-        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));
+        .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
+    if (iwc.getCodec() instanceof CompressingCodec) {
+      CompressingStoredFieldsFormat fmt = (CompressingStoredFieldsFormat) ((CompressingCodec) iwc.getCodec()).storedFieldsFormat();
+      // NOTE: copied from CompressingCodec.randomInstance(), but fixed to not
+      // use any memory index ... maybe we can instead add
+      // something like CompressingMemory to the
+      // SuppressCodecs list...?:
+      final CompressionMode mode = RandomPicks.randomFrom(random(), CompressionMode.values());
+      final int chunkSize = RandomInts.randomIntBetween(random(), 1, 500);
+      iwc.setCodec(new CompressingCodec(mode, chunkSize, CompressingStoredFieldsIndex.DISK_DOC));
+    }
+    IndexWriter w = new IndexWriter(dir, iwc);
     MergePolicy mp = w.getConfig().getMergePolicy();
     if (mp instanceof LogByteSizeMergePolicy) {
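
For reference, the guard added in this commit can be written as a self-contained helper. This is a minimal sketch, not code from the commit: it assumes only the 4.x-era APIs visible in the diff above (the concrete CompressingCodec(CompressionMode, int, CompressingStoredFieldsIndex) constructor and the CompressingStoredFieldsIndex enum), and the class and method names are hypothetical.

import java.util.Random;

import org.apache.lucene.codecs.compressing.CompressingCodec;
import org.apache.lucene.codecs.compressing.CompressingStoredFieldsIndex;
import org.apache.lucene.codecs.compressing.CompressionMode;
import org.apache.lucene.index.IndexWriterConfig;

import com.carrotsearch.randomizedtesting.generators.RandomInts;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;

// Hypothetical helper illustrating the commit's guard: if the randomly
// chosen test codec compresses stored fields, re-create it with a
// disk-backed stored-fields index (DISK_DOC), so a ~82M-doc run does not
// hold one heap-resident index entry per document (MEMORY_DOC) or per
// chunk (MEMORY_CHUNK).
final class DiskIndexOnly {
  static void forceDiskStoredFieldsIndex(IndexWriterConfig iwc, Random random) {
    if (iwc.getCodec() instanceof CompressingCodec) {
      // Same randomization as CompressingCodec.randomInstance(), but with
      // the stored-fields index pinned to DISK_DOC:
      CompressionMode mode = RandomPicks.randomFrom(random, CompressionMode.values());
      int chunkSize = RandomInts.randomIntBetween(random, 1, 500);
      iwc.setCodec(new CompressingCodec(mode, chunkSize, CompressingStoredFieldsIndex.DISK_DOC));
    }
  }
}

As in the diff, this would run after the IndexWriterConfig is built and before the IndexWriter is opened. The NOTE comment in the commit floats the longer-term alternative: list a memory-resident compressing codec in the test's @SuppressCodecs annotation, which would make a guard like this unnecessary.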