LUCENE-5633: replace NoMergePolicy.COMPOUND/NO_COMPOUND by NoMergePolicy.INSTANCE

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1591432 13f79535-47bb-0310-9956-ffa450edef68
Shai Erera 2014-04-30 18:13:04 +00:00
parent 0a83273dfa
commit b8c02f6267
30 changed files with 80 additions and 116 deletions

CHANGES.txt

@@ -114,6 +114,9 @@ API Changes
   deprecated and will be removed in Lucene 5.0. (Uwe Schindler,
   Robert Muir)
 
+* LUCENE-5633: Change NoMergePolicy to a singleton with no distinction between
+  compound and non-compound types. (Shai Erera)
+
 Optimizations
 
 * LUCENE-5603: hunspell stemmer more efficiently strips prefixes
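
For callers migrating off the removed constants, the change is mechanical. A minimal before/after sketch (hedged: conf stands for any IndexWriterConfig; the setUseCompoundFile call mirrors what this commit does in CreateIndexTask, and is only needed if the constant was picked for its compound-file side effect):

    // before (Lucene 4.7 and earlier)
    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);  // or NO_COMPOUND_FILES
    // after (LUCENE-5633)
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    conf.setUseCompoundFile(true);  // use false to match the old NO_COMPOUND_FILES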

CreateIndexTask.java

@@ -138,8 +138,9 @@ public class CreateIndexTask extends PerfTask {
     final String mergePolicy = config.get("merge.policy",
         "org.apache.lucene.index.LogByteSizeMergePolicy");
     boolean isCompound = config.get("compound", true);
+    iwConf.setUseCompoundFile(isCompound);
     if (mergePolicy.equals(NoMergePolicy.class.getName())) {
-      iwConf.setMergePolicy(isCompound ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+      iwConf.setMergePolicy(NoMergePolicy.INSTANCE);
     } else {
       try {
         iwConf.setMergePolicy(Class.forName(mergePolicy).asSubclass(MergePolicy.class).newInstance());
@@ -147,7 +148,7 @@ public class CreateIndexTask extends PerfTask {
         throw new RuntimeException("unable to instantiate class '" + mergePolicy + "' as merge policy", e);
       }
       iwConf.getMergePolicy().setNoCFSRatio(isCompound ? 1.0 : 0.0);
-      if(iwConf.getMergePolicy() instanceof LogMergePolicy) {
+      if (iwConf.getMergePolicy() instanceof LogMergePolicy) {
         LogMergePolicy logMergePolicy = (LogMergePolicy) iwConf.getMergePolicy();
         logMergePolicy.setMergeFactor(config.get("merge.factor",OpenIndexTask.DEFAULT_MERGE_PFACTOR));
       }
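
For any other merge policy the task still honors the "compound" setting through the policy's CFS ratio: 1.0 packs every merged segment into a compound file, 0.0 never does. A hedged sketch of the same pattern outside the benchmark (assumes an IndexWriterConfig named iwConf and a boolean isCompound, as in the diff above):

    // CFS decision for merged segments lives on the merge policy itself
    TieredMergePolicy tmp = new TieredMergePolicy();
    tmp.setNoCFSRatio(isCompound ? 1.0 : 0.0);
    iwConf.setMergePolicy(tmp);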

NoMergePolicy.java

@@ -22,34 +22,16 @@ import java.util.Map;
 
 /**
- * A {@link MergePolicy} which never returns merges to execute (hence it's
- * name). It is also a singleton and can be accessed through
- * {@link NoMergePolicy#NO_COMPOUND_FILES} if you want to indicate the index
- * does not use compound files, or through {@link NoMergePolicy#COMPOUND_FILES}
- * otherwise. Use it if you want to prevent an {@link IndexWriter} from ever
- * executing merges, without going through the hassle of tweaking a merge
- * policy's settings to achieve that, such as changing its merge factor.
+ * A {@link MergePolicy} which never returns merges to execute. Use it if you
+ * want to prevent segment merges.
  */
 public final class NoMergePolicy extends MergePolicy {
 
-  /**
-   * A singleton {@link NoMergePolicy} which indicates the index does not use
-   * compound files.
-   */
-  public static final MergePolicy NO_COMPOUND_FILES = new NoMergePolicy(false);
-
-  /**
-   * A singleton {@link NoMergePolicy} which indicates the index uses compound
-   * files.
-   */
-  public static final MergePolicy COMPOUND_FILES = new NoMergePolicy(true);
-
-  private final boolean useCompoundFile;
-
-  private NoMergePolicy(boolean useCompoundFile) {
-    super(useCompoundFile ? 1.0 : 0.0, 0);
-    // prevent instantiation
-    this.useCompoundFile = useCompoundFile;
+  /** Singleton instance. */
+  public static final MergePolicy INSTANCE = new NoMergePolicy();
+
+  private NoMergePolicy() {
+    super();
   }
 
   @Override
@@ -66,14 +48,16 @@ public final class NoMergePolicy extends MergePolicy {
   public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos) { return null; }
 
   @Override
-  public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) { return useCompoundFile; }
+  public boolean useCompoundFile(SegmentInfos segments, SegmentCommitInfo newSegment) {
+    return newSegment.info.getUseCompoundFile();
+  }
 
   @Override
   public void setIndexWriter(IndexWriter writer) {}
 
   @Override
   protected long size(SegmentCommitInfo info) throws IOException {
     return Long.MAX_VALUE;
   }
 
   @Override
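
With the per-CFS singletons gone, the policy carries no state: its useCompoundFile override simply keeps whatever format each segment already has, so one shared instance is enough. A minimal usage sketch (assumes Lucene 4.8-era APIs; analyzer and dir are placeholders):

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.NoMergePolicy;
    import org.apache.lucene.util.Version;

    // this writer will flush new segments but never merge them
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_48, analyzer);
    conf.setMergePolicy(NoMergePolicy.INSTANCE);
    IndexWriter writer = new IndexWriter(dir, conf);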

TestAddIndexes.java

@@ -417,7 +417,7 @@ public class TestAddIndexes extends LuceneTestCase {
     setUpDirs(dir, aux, true);
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
     for (int i = 0; i < 20; i++) {
       writer.deleteDocuments(new Term("id", "" + i));
@@ -469,7 +469,7 @@ public class TestAddIndexes extends LuceneTestCase {
     writer.shutdown();
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     writer = new IndexWriter(aux, dontMergeConfig);
     for (int i = 0; i < 27; i++) {
       writer.deleteDocuments(new Term("id", "" + i));
@@ -480,7 +480,7 @@ public class TestAddIndexes extends LuceneTestCase {
     reader.close();
     dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     writer = new IndexWriter(aux2, dontMergeConfig);
     for (int i = 0; i < 8; i++) {
       writer.deleteDocuments(new Term("id", "" + i));

TestBackwardsCompatibility.java

@@ -629,9 +629,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     addNoProxDoc(writer);
     writer.shutdown();
-    writer = new IndexWriter(dir,
-      conf.setMergePolicy(doCFS ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)
-    );
+    writer = new IndexWriter(dir, conf.setMergePolicy(NoMergePolicy.INSTANCE));
     Term searchTerm = new Term("id", "7");
     writer.deleteDocuments(searchTerm);
     writer.shutdown();

TestBinaryDocValuesUpdates.java

@@ -148,7 +148,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(2); // generate few segments
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
     int numDocs = 10;
     long[] expectedValues = new long[numDocs];
@@ -235,7 +235,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // control segment flushing
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
     for (int i = 0; i < 6; i++) {
@@ -851,7 +851,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     // prevent merges, otherwise by the time updates are applied
     // (writer.shutdown()), the segments might have merged and that update becomes
     // legit.
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
     // first segment with BDV
@@ -906,7 +906,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
     // prevent merges, otherwise by the time updates are applied
     // (writer.shutdown()), the segments might have merged and that update becomes
     // legit.
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
     // first segment with BDV
@@ -1172,7 +1172,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
   public void testChangeCodec() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
     conf.setCodec(new Lucene46Codec() {
       @Override
       public DocValuesFormat getDocValuesFormatForField(String field) {

TestConsistentFieldNumbers.java

@@ -37,7 +37,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
   public void testSameFieldNumbersAcrossSegments() throws Exception {
     for (int i = 0; i < 2; i++) {
       Directory dir = newDirectory();
-      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
       Document d1 = new Document();
       d1.add(new StringField("f1", "first field", Field.Store.YES));
@@ -46,7 +46,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
       if (i == 1) {
         writer.shutdown();
-        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
       } else {
         writer.commit();
       }
@@ -100,7 +100,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
   public void testAddIndexes() throws Exception {
     Directory dir1 = newDirectory();
     Directory dir2 = newDirectory();
-    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     Document d1 = new Document();
     d1.add(new TextField("f1", "first field", Field.Store.YES));
@@ -108,7 +108,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     writer.addDocument(d1);
     writer.shutdown();
-    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     Document d2 = new Document();
     FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@@ -121,7 +121,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     writer.shutdown();
-    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     writer.addIndexes(dir2);
     writer.shutdown();
@@ -151,7 +151,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
-          NoMergePolicy.NO_COMPOUND_FILES));
+          NoMergePolicy.INSTANCE));
       Document d = new Document();
       d.add(new TextField("f1", "d1 first field", Field.Store.YES));
       d.add(new TextField("f2", "d1 second field", Field.Store.YES));
@@ -168,9 +168,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
-          random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
-              : NoMergePolicy.COMPOUND_FILES));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
       Document d = new Document();
       d.add(new TextField("f1", "d2 first field", Field.Store.YES));
       d.add(new StoredField("f3", new byte[] { 1, 2, 3 }));
@@ -190,9 +188,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
-          random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
-              : NoMergePolicy.COMPOUND_FILES));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
       Document d = new Document();
       d.add(new TextField("f1", "d3 first field", Field.Store.YES));
       d.add(new TextField("f2", "d3 second field", Field.Store.YES));
@@ -217,9 +213,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     {
       IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
-          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
-          random().nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
-              : NoMergePolicy.COMPOUND_FILES));
+          TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
       writer.deleteDocuments(new Term("f1", "d1"));
       // nuke the first segment entirely so that the segment with gaps is
       // loaded first!

TestDeletionPolicy.java

@@ -652,7 +652,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
       writer.shutdown();
       conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
           .setIndexDeletionPolicy(policy)
-          .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+          .setMergePolicy(NoMergePolicy.INSTANCE);
       writer = new IndexWriter(dir, conf);
       policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
       writer.deleteDocuments(new Term("id", "" + (i*(N+1)+3)));

TestIndexFileDeleter.java

@@ -72,7 +72,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
     writer = new IndexWriter(
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
-            setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setUseCompoundFile(true)
+            setMergePolicy(NoMergePolicy.INSTANCE).setUseCompoundFile(true)
     );
     Term searchTerm = new Term("id", "7");
     writer.deleteDocuments(searchTerm);

TestIndexWriter.java

@@ -21,7 +21,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.io.StringReader;
-import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -32,7 +31,12 @@ import java.util.Random;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenFilter;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
 import org.apache.lucene.codecs.Codec;
@@ -112,7 +116,7 @@ public class TestIndexWriter extends LuceneTestCase {
     writer.shutdown();
 
     // delete 40 documents
-    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+    writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     for (i = 0; i < 40; i++) {
       writer.deleteDocuments(new Term("id", ""+i));
     }

TestIndexWriterDelete.java

@@ -20,7 +20,6 @@ package org.apache.lucene.index;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -1025,7 +1024,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     // note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999)
     IndexWriter w = new IndexWriter(dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-        .setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
+        .setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false));
     int count = 0;
     while(true) {
       Document doc = new Document();
@@ -1071,7 +1070,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     final int flushAtDelCount = atLeast(1020);
     IndexWriter w = new IndexWriter(dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
-        setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false));
+        setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false));
     int count = 0;
     while(true) {
       Document doc = new Document();
@@ -1112,7 +1111,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
     final AtomicBoolean sawAfterFlush = new AtomicBoolean();
     IndexWriter w = new IndexWriter(dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
-        setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).setReaderPooling(false)) {
+        setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false)) {
       @Override
       public void doAfterFlush() {
         assertTrue("only " + docsInSegment.get() + " in segment", closing.get() || docsInSegment.get() >= 7);

TestIndexWriterExceptions.java

@@ -54,8 +54,8 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
 import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
@@ -708,9 +708,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
     {
       final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(-1)
-          .setMergePolicy(
-              random().nextBoolean() ? NoMergePolicy.COMPOUND_FILES
-                  : NoMergePolicy.NO_COMPOUND_FILES));
+          .setMergePolicy(NoMergePolicy.INSTANCE));
       // don't use a merge policy here they depend on the DWPThreadPool and its max thread states etc.
       final int finalI = i;

TestIndexWriterMergePolicy.java

@@ -196,7 +196,7 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
     writer = new IndexWriter(
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
-            setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)
+            setMergePolicy(NoMergePolicy.INSTANCE)
     );
     writer.deleteDocuments(new Term("content", "aaa"));
     writer.shutdown();

TestIndexWriterMerging.java

@@ -16,7 +16,6 @@ package org.apache.lucene.index;
  */
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
@@ -156,7 +155,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
     ir.close();
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     writer = new IndexWriter(dir, dontMergeConfig);
     writer.deleteDocuments(new Term("id", "0"));
     writer.deleteDocuments(new Term("id", "7"));
@@ -219,7 +218,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
     ir.close();
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     writer = new IndexWriter(dir, dontMergeConfig);
     for(int i=0;i<98;i+=2) {
       writer.deleteDocuments(new Term("id", "" + i));
@@ -285,7 +284,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
     ir.close();
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     writer = new IndexWriter(dir, dontMergeConfig);
     for(int i=0;i<98;i+=2) {
       writer.deleteDocuments(new Term("id", "" + i));

TestIndexWriterReader.java

@@ -124,11 +124,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
       iwc.setMaxBufferedDocs(20);
     }
     // no merging
-    if (random().nextBoolean()) {
-      iwc.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
-    } else {
-      iwc.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
-    }
+    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
     if (VERBOSE) {
       System.out.println("TEST: make index");
     }
@@ -238,11 +234,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
       iwc.setMaxBufferedDocs(20);
     }
     // no merging
-    if (random().nextBoolean()) {
-      iwc.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
-    } else {
-      iwc.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
-    }
+    iwc.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir1, iwc);
     // create the index
@@ -1084,7 +1076,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
     });
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges from getting in the way
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges from getting in the way
     IndexWriter writer = new IndexWriter(dir, conf);
     // create a segment and open an NRT reader

TestMultiFields.java

@@ -36,7 +36,7 @@ public class TestMultiFields extends LuceneTestCase {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     // we can do this because we use NoMergePolicy (and dont merge to "nothing")
     w.setKeepFullyDeletedSegments(true);

TestNoMergePolicy.java

@@ -29,20 +29,13 @@ public class TestNoMergePolicy extends LuceneTestCase {
   @Test
   public void testNoMergePolicy() throws Exception {
-    MergePolicy mp = NoMergePolicy.NO_COMPOUND_FILES;
+    MergePolicy mp = NoMergePolicy.INSTANCE;
     assertNull(mp.findMerges(null, (SegmentInfos)null));
     assertNull(mp.findForcedMerges(null, 0, null));
     assertNull(mp.findForcedDeletesMerges(null));
-    assertFalse(mp.useCompoundFile(null, null));
     mp.close();
   }
 
-  @Test
-  public void testCompoundFiles() throws Exception {
-    assertFalse(NoMergePolicy.NO_COMPOUND_FILES.useCompoundFile(null, null));
-    assertTrue(NoMergePolicy.COMPOUND_FILES.useCompoundFile(null, null));
-  }
-
   @Test
   public void testFinalSingleton() throws Exception {
     assertTrue(Modifier.isFinal(NoMergePolicy.class.getModifiers()));

TestNumericDocValuesUpdates.java

@@ -127,7 +127,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(2); // generate few segments
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
     int numDocs = 10;
     long[] expectedValues = new long[numDocs];
@@ -212,7 +212,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
     conf.setMaxBufferedDocs(10); // control segment flushing
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
     IndexWriter writer = new IndexWriter(dir, conf);
     for (int i = 0; i < 6; i++) {
@@ -830,7 +830,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     // prevent merges, otherwise by the time updates are applied
     // (writer.shutdown()), the segments might have merged and that update becomes
     // legit.
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
     // first segment with NDV
@@ -884,7 +884,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
     // prevent merges, otherwise by the time updates are applied
     // (writer.shutdown()), the segments might have merged and that update becomes
     // legit.
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     IndexWriter writer = new IndexWriter(dir, conf);
     // first segment with NDV
@@ -1152,7 +1152,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
   public void testChangeCodec() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions.
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
     conf.setCodec(new Lucene46Codec() {
       @Override
       public DocValuesFormat getDocValuesFormatForField(String field) {

TestParallelCompositeReader.java

@@ -479,7 +479,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
   private Directory getDir1(Random random) throws IOException {
     Directory dir1 = newDirectory();
     IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
     Document d1 = new Document();
     d1.add(newTextField("f1", "v1", Field.Store.YES));
     d1.add(newTextField("f2", "v1", Field.Store.YES));
@@ -506,7 +506,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
   private Directory getDir2(Random random) throws IOException {
     Directory dir2 = newDirectory();
     IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
     Document d1 = new Document();
     d1.add(newTextField("f3", "v1", Field.Store.YES));
     d1.add(newTextField("f4", "v1", Field.Store.YES));
@@ -533,7 +533,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
   private Directory getInvalidStructuredDir2(Random random) throws IOException {
     Directory dir2 = newDirectory();
     IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
+        new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
     Document d1 = new Document();
     d1.add(newTextField("f3", "v1", Field.Store.YES));
     d1.add(newTextField("f4", "v1", Field.Store.YES));

TestParallelReaderEmptyIndex.java

@@ -104,7 +104,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
     iw.shutdown();
     IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
-      .setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+      .setMergePolicy(NoMergePolicy.INSTANCE);
     if (VERBOSE) {
       System.out.println("\nTEST: make 2nd writer");
     }

TestSizeBoundedForceMerge.java

@@ -48,7 +48,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
     conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
     conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
     // prevent any merges by default.
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     return conf;
   }

TestControlledRealTimeReopenThread.java

@@ -45,9 +45,8 @@ import org.apache.lucene.index.TrackingIndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.NRTCachingDirectory;
 import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
+import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.apache.lucene.util.ThreadInterruptedException;
 import org.apache.lucene.util.Version;
@@ -304,7 +303,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearchingTestCase {
    */
   public void testThreadStarvationNoDeleteNRTReader() throws IOException, InterruptedException {
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    conf.setMergePolicy(random().nextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES);
+    conf.setMergePolicy(NoMergePolicy.INSTANCE);
     Directory d = newDirectory();
     final CountDownLatch latch = new CountDownLatch(1);
     final CountDownLatch signal = new CountDownLatch(1);

TestTaxonomyFacetCounts.java

@@ -651,7 +651,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase {
     Directory taxoDir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    iwc.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges
+    iwc.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges
     IndexWriter indexWriter = new IndexWriter(indexDir, iwc);
     TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

TestTaxonomyFacetCounts2.java

@@ -242,7 +242,7 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase {
     // 4. Segment w/ categories, but only some results
     IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges, so we can control the index segments
+    conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges, so we can control the index segments
     IndexWriter indexWriter = new IndexWriter(indexDir, conf);
     TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

GroupFacetCollectorTest.java

@@ -286,7 +286,7 @@ public class GroupFacetCollectorTest extends AbstractGroupingTestCase {
         random(),
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+            new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     boolean useDv = false;
     // Cannot assert this since we use NoMergePolicy:

TestBlockJoin.java

@@ -65,7 +65,7 @@ public class TestBlockJoin extends LuceneTestCase {
   public void testEmptyChildFilter() throws Exception {
     final Directory dir = newDirectory();
     final IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
-    config.setMergePolicy(NoMergePolicy.NO_COMPOUND_FILES);
+    config.setMergePolicy(NoMergePolicy.INSTANCE);
     // we don't want to merge - since we rely on certain segment setup
     final IndexWriter w = new IndexWriter(dir, config);
@@ -387,7 +387,7 @@ public class TestBlockJoin extends LuceneTestCase {
         random(),
         dir,
         newIndexWriterConfig(TEST_VERSION_CURRENT,
-            new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+            new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     // Cannot assert this since we use NoMergePolicy:
     w.setDoRandomForceMergeAssert(false);

TestBlockJoinSorting.java

@@ -53,7 +53,7 @@ public class TestBlockJoinSorting extends LuceneTestCase {
   public void testNestedSorting() throws Exception {
     final Directory dir = newDirectory();
     final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     List<Document> docs = new ArrayList<>();
     Document document = new Document();

TestMultiPassIndexSplitter.java

@@ -1,4 +1,5 @@
 package org.apache.lucene.index;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements. See the NOTICE file distributed with
@@ -22,7 +23,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
 
 public class TestMultiPassIndexSplitter extends LuceneTestCase {
   IndexReader input;
@@ -33,7 +33,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase {
   public void setUp() throws Exception {
     super.setUp();
     dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     Document doc;
     for (int i = 0; i < NUM_DOCS; i++) {
       doc = new Document();

TestPKIndexSplitter.java

@@ -38,7 +38,7 @@ public class TestPKIndexSplitter extends LuceneTestCase {
     Directory dir = newDirectory();
     IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
-        .setOpenMode(OpenMode.CREATE).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        .setOpenMode(OpenMode.CREATE).setMergePolicy(NoMergePolicy.INSTANCE));
     for (int x = 0; x < 11; x++) {
       Document doc = createDocument(x, "1", 3, format);
       w.addDocument(doc);
@@ -58,7 +58,7 @@ public class TestPKIndexSplitter extends LuceneTestCase {
     // delete some documents
     w = new IndexWriter(dir, newIndexWriterConfig(
         TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
-        .setOpenMode(OpenMode.APPEND).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+        .setOpenMode(OpenMode.APPEND).setMergePolicy(NoMergePolicy.INSTANCE));
     w.deleteDocuments(midTerm);
     w.deleteDocuments(new Term("id", format.format(2)));
     w.shutdown();

BaseStoredFieldsFormatTestCase.java

@@ -651,7 +651,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormatTestCase {
   public void testBulkMergeWithDeletes() throws IOException {
     final int numDocs = atLeast(200);
     Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
    for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       doc.add(new StringField("id", Integer.toString(i), Store.YES));