LUCENE-5859: remove Version param from LuceneTestCase.newIndexWriterConfig; the grand sum of 2 tests making use of it can use the 3-arg version and reduce the noise everywhere else

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1614698 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2014-07-30 16:20:24 +00:00
parent 8fa6c59beb
commit 3f4b2b472f
244 changed files with 1322 additions and 1287 deletions
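
For readers skimming the diff: every call site changes the same way, dropping the leading Version argument. A minimal before/after sketch, using the helper names visible in the diff (the exact shape of the surviving 3-arg overload is an assumption based on the commit message):

    // before: every test threaded a Version through the helper
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));

    // after: the common overload takes only the Analyzer
    conf = newIndexWriterConfig(new MockAnalyzer(random()));

    // the rare test that really needs to pin a Version moves to the
    // surviving 3-arg overload (assumed shape: Random, Version, Analyzer)
    conf = newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random()));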


@ -52,8 +52,7 @@ public class TestEmptyTokenStream extends BaseTokenStreamTestCase {
public void testIndexWriter_LUCENE4656() throws IOException {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, null));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(null));
TokenStream ts = new EmptyTokenStream();
assertFalse(ts.hasAttribute(TermToBytesRefAttribute.class));
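
A note on the null argument above: the one-arg overload tolerates a null Analyzer, which tests like this one rely on because every field supplies its own TokenStream and nothing ever needs to be analyzed. A minimal sketch of the pattern (field name and cleanup are illustrative, not part of the patch):

    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null));
    Document doc = new Document();
    // the field carries a pre-built (here: empty) TokenStream, so the
    // null analyzer is never asked to tokenize anything
    doc.add(new TextField("description", new EmptyTokenStream()));
    writer.addDocument(doc);
    writer.shutdown();
    dir.close();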


@ -90,7 +90,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
public void testEndOffsetPositionWithTeeSinkTokenFilter() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
TokenStream tokenStream = analyzer.tokenStream("field", "abcd ");
TeeSinkTokenFilter tee = new TeeSinkTokenFilter(tokenStream);


@ -56,7 +56,7 @@ public class Test10KPulsings extends LuceneTestCase {
BaseDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
Document document = new Document();
FieldType ft = new FieldType(TextField.TYPE_STORED);
@ -107,7 +107,7 @@ public class Test10KPulsings extends LuceneTestCase {
BaseDirectoryWrapper dir = newFSDirectory(f);
dir.setCheckIndexOnClose(false); // we do this ourselves explicitly
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
Document document = new Document();
FieldType ft = new FieldType(TextField.TYPE_STORED);


@ -47,7 +47,7 @@ public class TestPulsingReuse extends LuceneTestCase {
Codec cp = TestUtil.alwaysPostingsFormat(new Pulsing41PostingsFormat(1));
Directory dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
Document doc = new Document();
doc.add(new TextField("foo", "a b b c c c d e f g g h i i j j k", Field.Store.NO));
iw.addDocument(doc);
@ -85,7 +85,7 @@ public class TestPulsingReuse extends LuceneTestCase {
Codec cp = TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat());
BaseDirectoryWrapper dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
Document doc = new Document();
doc.add(new TextField("foo", "a b b c c c d e f g g g h i i j j k l l m m m", Field.Store.NO));
// note: the reuse is imperfect, here we would have 4 enums (lost reuse when we get an enum for 'm')


@ -69,7 +69,7 @@ public class TestExternalCodecs extends LuceneTestCase {
dir.setCheckIndexOnClose(false); // we use a custom codec provider
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setCodec(new CustomPerFieldCodec()).
setMergePolicy(newLogMergePolicy(3))
);


@ -93,8 +93,8 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
Field idField = newStringField("id", "", Field.Store.YES);
doc.add(idField);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new MyMergeScheduler())
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new MyMergeScheduler())
.setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergePolicy(newLogMergePolicy()));
LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();


@ -42,7 +42,7 @@ public class TestSearch extends LuceneTestCase {
Directory directory = newDirectory();
try {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig conf = newIndexWriterConfig(analyzer);
IndexWriter writer = new IndexWriter(directory, conf);
try {
@ -110,7 +110,7 @@ public class TestSearch extends LuceneTestCase {
throws Exception {
Directory directory = newDirectory();
Analyzer analyzer = new MockAnalyzer(random);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig conf = newIndexWriterConfig(analyzer);
MergePolicy mp = conf.getMergePolicy();
mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
IndexWriter writer = new IndexWriter(directory, conf);


@ -70,7 +70,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
private void doTest(Random random, PrintWriter out, boolean useCompoundFiles, int MAX_DOCS) throws Exception {
Directory directory = newDirectory();
Analyzer analyzer = new MockAnalyzer(random);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig conf = newIndexWriterConfig(analyzer);
final MergePolicy mp = conf.getMergePolicy();
mp.setNoCFSRatio(useCompoundFiles ? 1.0 : 0.0);
IndexWriter writer = new IndexWriter(directory, conf);


@ -46,7 +46,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
@Test(expected=IllegalArgumentException.class)
public void testDeletePartiallyWrittenFilesIfAbort() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
iwConf.setCodec(CompressingCodec.randomInstance(random()));
// disable CFS because this test checks file names


@ -54,7 +54,7 @@ public class TestLucene40PostingsReader extends LuceneTestCase {
*/
public void testPostings() throws Exception {
Directory dir = newFSDirectory(createTempDir("postings"));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(Codec.forName("Lucene40"));
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);


@ -52,7 +52,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
Directory dir = newDirectory();
Codec cp = TestUtil.alwaysPostingsFormat(new Lucene40RWPostingsFormat());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random());
writer.commit();
@ -80,7 +80,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
Directory dir = newDirectory();
Codec cp = TestUtil.alwaysPostingsFormat(new Lucene40RWPostingsFormat());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random());
writer.commit();
@ -128,7 +128,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setCodec(cp));
newIndexWriterConfig(analyzer).setCodec(cp));
int numdocs = atLeast(20);
createRandomIndex(numdocs, writer, random());
writer.commit();


@ -44,7 +44,7 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
dir = newFSDirectory(createTempDir("testDFBlockSize"));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
iw = new RandomIndexWriter(random(), dir, iwc);
iw.setDoRandomForceMerge(false); // we will ourselves
@ -54,7 +54,7 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase {
public void tearDown() throws Exception {
iw.shutdown();
TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
iwc.setOpenMode(OpenMode.APPEND);
IndexWriter iw = new IndexWriter(dir, iwc);


@ -82,7 +82,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
}
}
};
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
// TODO we could actually add more fields implemented with different PFs
// or, just put this test into the usual rotation?
@ -137,7 +137,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
iw.shutdown();
verify(dir);
TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
iwc = newIndexWriterConfig(analyzer);
iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
iwc.setOpenMode(OpenMode.APPEND);
IndexWriter iw2 = new IndexWriter(dir, iwc);


@ -79,7 +79,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
final DocValuesFormat fast = DocValuesFormat.forName("Lucene49");
final DocValuesFormat slow = DocValuesFormat.forName("SimpleText");
iwc.setCodec(new Lucene49Codec() {


@ -97,8 +97,8 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
@Test
public void testMergeUnusedPerFieldCodec() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
IndexWriterConfig iwconf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
IndexWriter writer = newWriter(dir, iwconf);
addDocs(writer, 10);
writer.commit();
@ -124,8 +124,8 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: make new index");
}
IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
IndexWriterConfig iwconf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setCodec(new MockCodec());
iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
//((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
IndexWriter writer = newWriter(dir, iwconf);
@ -144,7 +144,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
assertQuery(new Term("content", "aaa"), dir, 10);
Codec codec = iwconf.getCodec();
iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
iwconf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setCodec(codec);
//((LogMergePolicy) iwconf.getMergePolicy()).setNoCFSRatio(0.0);
//((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10);
@ -301,7 +301,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
private void doTestMixedPostings(Codec codec) throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(codec);
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();


@ -59,8 +59,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()))
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
// add 100 documents
addDocs(writer, 100);
@ -70,7 +69,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMergePolicy(newLogMergePolicy(false))
);
@ -79,14 +78,14 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(40, writer.maxDoc());
writer.shutdown();
writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = newWriter(aux2, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.shutdown();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexes(aux, aux2);
assertEquals(190, writer.maxDoc());
@ -101,14 +100,14 @@ public class TestAddIndexes extends LuceneTestCase {
// now add another set in.
Directory aux3 = newDirectory();
writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = newWriter(aux3, newIndexWriterConfig(new MockAnalyzer(random())));
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.shutdown();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexes(aux3);
assertEquals(230, writer.maxDoc());
@ -122,7 +121,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "bbb"), 50);
// now fully merge it.
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
@ -135,11 +134,11 @@ public class TestAddIndexes extends LuceneTestCase {
// now add a single document
Directory aux4 = newDirectory();
writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = newWriter(aux4, newIndexWriterConfig(new MockAnalyzer(random())));
addDocs2(writer, 1);
writer.shutdown();
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexes(aux4);
assertEquals(231, writer.maxDoc());
@ -162,7 +161,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.addIndexes(aux);
// Adds 10 docs, then replaces them with another 10
@ -198,7 +197,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -236,7 +235,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux = newDirectory();
setUpDirs(dir, aux);
IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@ -276,7 +275,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
@ -284,7 +283,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false))
@ -294,7 +293,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.shutdown();
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false))
@ -302,7 +301,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDocs(writer, 100);
writer.shutdown();
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
try {
// cannot add self
writer.addIndexes(aux, dir);
@ -332,7 +331,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(4))
@ -361,7 +360,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(9).
setMergePolicy(newLogMergePolicy(4))
@ -390,7 +389,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(4))
@ -429,7 +428,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(4).
setMergePolicy(newLogMergePolicy(4))
@ -458,7 +457,7 @@ public class TestAddIndexes extends LuceneTestCase {
IndexWriter writer = newWriter(
aux2,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(100).
setMergePolicy(newLogMergePolicy(10))
@ -492,7 +491,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(6).
setMergePolicy(newLogMergePolicy(4))
@ -555,7 +554,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void setUpDirs(Directory dir, Directory aux, boolean withID) throws IOException {
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
if (withID) {
addDocsWithID(writer, 1000, 0);
@ -568,7 +567,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false, 10))
@ -583,7 +582,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.shutdown();
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(1000).
setMergePolicy(newLogMergePolicy(false, 10))
@ -602,7 +601,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setNoCFSRatio(0.0);
lmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new MockAnalyzer(random()))
.setMaxBufferedDocs(5).setMergePolicy(lmp));
Document doc = new Document();
@ -630,8 +629,7 @@ public class TestAddIndexes extends LuceneTestCase {
lmp.setMinMergeMB(0.0001);
lmp.setNoCFSRatio(0.0);
lmp.setMergeFactor(4);
writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()))
writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
writer.addIndexes(dir);
writer.shutdown();
@ -973,7 +971,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dirs[i], conf);
Document doc = new Document();
doc.add(new StringField("id", "myid", Field.Store.NO));
@ -1021,8 +1019,8 @@ public class TestAddIndexes extends LuceneTestCase {
Codec codec = new CustomPerFieldCodec();
IndexWriter writer = null;
writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(codec));
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setCodec(codec));
// add 100 documents
addDocsWithID(writer, 100, 0);
assertEquals(100, writer.maxDoc());
@ -1032,7 +1030,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setCodec(codec).
setMaxBufferedDocs(10).
@ -1046,7 +1044,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = newWriter(
aux2,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setCodec(codec)
);
@ -1059,7 +1057,7 @@ public class TestAddIndexes extends LuceneTestCase {
// test doc count before segments are merged
writer = newWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setCodec(codec)
);
@ -1137,8 +1135,7 @@ public class TestAddIndexes extends LuceneTestCase {
// of the unregistered codec:
toAdd.setCheckIndexOnClose(false);
{
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(new UnRegisteredCodec());
IndexWriter w = new IndexWriter(toAdd, conf);
Document doc = new Document();
@ -1151,8 +1148,7 @@ public class TestAddIndexes extends LuceneTestCase {
{
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(TestUtil.alwaysPostingsFormat(new Pulsing41PostingsFormat(1 + random().nextInt(20))));
IndexWriter w = new IndexWriter(dir, conf);
try {
@ -1268,7 +1264,7 @@ public class TestAddIndexes extends LuceneTestCase {
Directory dest = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setWriteLockTimeout(1);
RandomIndexWriter w2 = new RandomIndexWriter(random(), dest, iwc);


@ -38,7 +38,7 @@ import org.apache.lucene.util.TestUtil;
public class TestAllFilesHaveChecksumFooter extends LuceneTestCase {
public void test() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(new Lucene49Codec());
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
Document doc = new Document();


@ -38,7 +38,7 @@ import org.apache.lucene.util.TestUtil;
public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
public void test() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(new Lucene49Codec());
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf);
Document doc = new Document();


@ -276,7 +276,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
case 0: return new IndexUpgrader(dir, TEST_VERSION_CURRENT);
case 1: return new IndexUpgrader(dir, TEST_VERSION_CURRENT,
streamType ? null : InfoStream.NO_OUTPUT, false);
case 2: return new IndexUpgrader(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null), false);
case 2: return new IndexUpgrader(dir, newIndexWriterConfig(null), false);
default: fail("case statement didn't get updated when random bounds changed");
}
return null; // never get here
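
Worth noting in this hunk: only the test helper loses its Version parameter. IndexUpgrader itself (cases 0 and 1 above) keeps an explicit Version, since the upgrade target is real configuration rather than test noise. The asymmetry after this commit, as a two-line sketch:

    IndexUpgrader upgrader = new IndexUpgrader(dir, TEST_VERSION_CURRENT);     // Version still explicit
    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));  // no Version argument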
@ -330,8 +330,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
try {
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
} catch (IndexFormatTooOldException e) {
// pass
@ -386,8 +385,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
System.out.println("\nTEST: old index " + name);
}
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
w.addIndexes(oldIndexDirs.get(name));
if (VERBOSE) {
System.out.println("\nTEST: done adding indices; now close");
@ -403,8 +401,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(oldIndexDirs.get(name));
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
w.addIndexes(reader);
w.shutdown();
reader.close();
@ -604,7 +601,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
public void changeIndexWithAdds(Random random, Directory dir, String origOldName) throws IOException {
// open writer
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
// add 10 docs
for(int i=0;i<10;i++) {
addDoc(writer, 35+i);
@ -630,7 +629,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
reader.close();
// fully merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
@ -655,7 +656,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
reader.close();
// fully merge
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();


@ -93,8 +93,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdatesAreFlushed() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setRAMBufferSizeMB(0.00000001));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setRAMBufferSizeMB(0.00000001));
writer.addDocument(doc(0)); // val=1
writer.addDocument(doc(1)); // val=2
writer.addDocument(doc(3)); // val=2
@ -115,7 +115,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testSimple() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// make sure random config doesn't flush on us
conf.setMaxBufferedDocs(10);
conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -148,7 +148,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateFewSegments() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(2); // generate few segments
conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
IndexWriter writer = new IndexWriter(dir, conf);
@ -195,7 +195,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testReopen() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(doc(0));
writer.addDocument(doc(1));
@ -233,7 +233,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// create an index with a segment with only deletes, a segment with both
// deletes and updates and a segment with only updates
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
IndexWriter writer = new IndexWriter(dir, conf);
@ -282,7 +282,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdatesWithDeletes() throws Exception {
// update and delete different documents in the same commit session
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
IndexWriter writer = new IndexWriter(dir, conf);
@ -316,7 +316,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateAndDeleteSameDocument() throws Exception {
// update and delete same document in same commit session
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
IndexWriter writer = new IndexWriter(dir, conf);
@ -349,7 +349,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testMultipleDocValuesTypes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // prevent merges
IndexWriter writer = new IndexWriter(dir, conf);
@ -398,7 +398,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testMultipleBinaryDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // prevent merges
IndexWriter writer = new IndexWriter(dir, conf);
@ -431,7 +431,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testDocumentWithNoValue() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
for (int i = 0; i < 2; i++) {
@ -463,7 +463,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// we don't support adding new fields or updating existing non-binary-dv
// fields through binary updates
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -493,7 +493,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testDifferentDVFormatPerField() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(new Lucene49Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
@ -530,7 +530,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateSameDocMultipleTimes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -557,7 +557,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testSegmentMerges() throws Exception {
Directory dir = newDirectory();
Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
IndexWriter writer = new IndexWriter(dir, conf);
int docid = 0;
@ -585,7 +585,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.commit();
} else if (random.nextDouble() < 0.1) {
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
conf = newIndexWriterConfig(new MockAnalyzer(random));
writer = new IndexWriter(dir, conf);
}
@ -628,7 +628,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateDocumentByMultipleTerms() throws Exception {
// make sure the order of updates is respected, even when multiple terms affect same document
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -656,7 +656,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testManyReopensAndFields() throws Exception {
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
LogMergePolicy lmp = newLogMergePolicy();
lmp.setMergeFactor(3); // merge often
conf.setMergePolicy(lmp);
@ -745,7 +745,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateSegmentWithNoDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// legit.
@ -799,7 +799,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateSegmentWithPostingButNoDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// legit.
@ -842,7 +842,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// this used to fail because FieldInfos.Builder neglected to update
// globalFieldMaps.docValueTypes map
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -867,7 +867,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
boolean oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
// create a segment with an old Codec
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]);
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
IndexWriter writer = new IndexWriter(dir, conf);
@ -877,7 +877,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.updateBinaryDocValue(new Term("id", "doc"), "f", toBytes(4L));
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
@ -895,7 +895,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testStressMultiThreading() throws Exception {
final Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
final IndexWriter writer = new IndexWriter(dir, conf);
// create index
@ -1024,7 +1024,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateDifferentDocsInDifferentGens() throws Exception {
// update same document multiple times across generations
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(4);
IndexWriter writer = new IndexWriter(dir, conf);
final int numDocs = atLeast(10);
@ -1060,7 +1060,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testChangeCodec() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
conf.setCodec(new Lucene49Codec() {
@Override
@ -1077,7 +1077,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown();
// change format
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
conf.setCodec(new Lucene49Codec() {
@Override
@ -1108,7 +1108,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testAddIndexes() throws Exception {
Directory dir1 = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir1, conf);
final int numDocs = atLeast(50);
@ -1138,7 +1138,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
if (random().nextBoolean()) {
writer.addIndexes(dir1);
@ -1165,7 +1165,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testDeleteUnusedUpdatesFiles() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1196,7 +1196,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
IndexWriter writer = new IndexWriter(dir, conf);
@ -1262,7 +1262,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdatesOrder() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1288,7 +1288,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateAllDeletedSegment() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1312,7 +1312,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
public void testUpdateTwoNonexistingTerms() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1337,7 +1337,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// LUCENE-5591: make sure we pass an IOContext with an approximate
// segmentSize in FlushInfo
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// we want a single large enough segment so that a doc-values update writes a large file
conf.setMergePolicy(NoMergePolicy.INSTANCE);
conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush
@ -1350,7 +1350,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.close();
NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, 100, 1/(1024.*1024.));
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
// we want a single large enough segment so that a doc-values update writes a large file
conf.setMergePolicy(NoMergePolicy.INSTANCE);
conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush


@ -39,7 +39,8 @@ public class TestCheckIndex extends LuceneTestCase {
public void testDeletedDocs() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for(int i=0;i<19;i++) {
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -101,7 +102,7 @@ public class TestCheckIndex extends LuceneTestCase {
// LUCENE-4221: we have to let these thru, for now
public void testBogusTermVectors() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
Document doc = new Document();
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setStoreTermVectors(true);


@ -830,8 +830,7 @@ public class TestCodecs extends LuceneTestCase {
// returns 1 in docsEnum.freq()
Directory dir = newDirectory();
Random random = random();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)));
// we don't need many documents to assert this, but don't use one document either
int numDocs = atLeast(random, 50);
for (int i = 0; i < numDocs; i++) {
@ -857,7 +856,7 @@ public class TestCodecs extends LuceneTestCase {
public void testDisableImpersonation() throws Exception {
Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec() };
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]);
IndexWriter writer = new IndexWriter(dir, conf);


@ -83,7 +83,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure);
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
Document doc = new Document();
Field idField = newStringField("id", "", Field.Store.YES);
doc.add(idField);
@ -139,9 +140,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// merging of segments with and without deletes at the
// start:
mp.setMinMergeDocs(1000);
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(mp));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(mp));
Document doc = new Document();
Field idField = newStringField("id", "", Field.Store.YES);
@ -177,9 +177,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
public void testNoExtraFiles() throws IOException {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for(int iter=0;iter<7;iter++) {
if (VERBOSE) {
@ -196,9 +195,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
}
writer.shutdown();
@ -214,7 +212,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
// Force excessive merging:
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(100))
@ -248,7 +246,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// Reopen
writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMergePolicy(newLogMergePolicy(100)).
// Force excessive merging:
@ -352,7 +350,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
if (d instanceof MockDirectoryWrapper) {
((MockDirectoryWrapper)d).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(5);
CountDownLatch atLeastOneMerge = new CountDownLatch(1);
iwc.setMergeScheduler(new TrackingCMS(atLeastOneMerge));


@ -37,7 +37,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
public void testSameFieldNumbersAcrossSegments() throws Exception {
for (int i = 0; i < 2; i++) {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d1 = new Document();
d1.add(new StringField("f1", "first field", Field.Store.YES));
@ -46,7 +47,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
if (i == 1) {
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
} else {
writer.commit();
}
@ -76,7 +78,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
assertEquals("f3", fis2.fieldInfo(2).name);
assertEquals("f4", fis2.fieldInfo(3).name);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.forceMerge(1);
writer.shutdown();
@ -100,7 +102,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
public void testAddIndexes() throws Exception {
Directory dir1 = newDirectory();
Directory dir2 = newDirectory();
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d1 = new Document();
d1.add(new TextField("f1", "first field", Field.Store.YES));
@ -108,7 +111,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.addDocument(d1);
writer.shutdown();
writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d2 = new Document();
FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@ -121,7 +125,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.shutdown();
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
writer.addIndexes(dir2);
writer.shutdown();
@ -149,9 +154,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
for (int i = 0; i < numIters; i++) {
Directory dir = newDirectory();
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d = new Document();
d.add(new TextField("f1", "d1 first field", Field.Store.YES));
d.add(new TextField("f2", "d1 second field", Field.Store.YES));
@ -167,8 +171,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d = new Document();
d.add(new TextField("f1", "d2 first field", Field.Store.YES));
d.add(new StoredField("f3", new byte[] { 1, 2, 3 }));
@ -187,8 +191,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
}
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d = new Document();
d.add(new TextField("f1", "d3 first field", Field.Store.YES));
d.add(new TextField("f2", "d3 second field", Field.Store.YES));
@ -212,8 +216,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
}
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
writer.deleteDocuments(new Term("f1", "d1"));
// nuke the first segment entirely so that the segment with gaps is
// loaded first!
@ -221,9 +225,9 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.shutdown();
}
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(
new LogByteSizeMergePolicy()).setInfoStream(new FailOnNonBulkMergesInfoStream()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(new LogByteSizeMergePolicy())
.setInfoStream(new FailOnNonBulkMergesInfoStream()));
writer.forceMerge(1);
writer.shutdown();
@ -251,7 +255,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
}
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();
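
A minimal sketch of the call shape this commit standardizes on, assuming a LuceneTestCase subclass with a Directory dir in scope (illustrative lines, not part of the patch): the one-arg newIndexWriterConfig(Analyzer) defaults the Version, so setup reduces to the analyzer plus chained setters.

    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setMergePolicy(NoMergePolicy.INSTANCE);  // no Version argument anymore
    IndexWriter writer = new IndexWriter(dir, conf);
    writer.addDocument(new Document());
    writer.shutdown();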
View File
@ -37,7 +37,7 @@ public class TestCrash extends LuceneTestCase {
private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
if (initialCommit) {
View File
@ -65,7 +65,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// NOTE: cannot use RandomIndexWriter because it
// sometimes commits:
IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
indexWriter.addDocument(getDocument());
// writes segments_1:
@ -96,7 +96,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// it doesn't know what to do with the created but empty
// segments_2 file
IndexWriter indexWriter = new IndexWriter(realDirectory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
// currently the test fails above.
// however, to test the fix, the following lines should pass as well.
View File
@ -45,8 +45,7 @@ public class TestCustomNorms extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
analyzer);
IndexWriterConfig config = newIndexWriterConfig(analyzer);
Similarity provider = new MySimProvider();
config.setSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
View File
@ -221,8 +221,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
final double SECONDS = 2.0;
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()))
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS));
MergePolicy mp = conf.getMergePolicy();
mp.setNoCFSRatio(1.0);
@ -240,9 +239,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = System.currentTimeMillis();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
mp.setNoCFSRatio(1.0);
writer = new IndexWriter(dir, conf);
@ -316,8 +315,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir))
.setMaxBufferedDocs(10)
.setMergeScheduler(new SerialMergeScheduler());
@ -337,9 +335,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
r.close();
}
if (needsMerging) {
conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy);
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
if (VERBOSE) {
@ -384,10 +382,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Open & close a writer and assert that it
// actually removed something:
int preCount = dir.listAll().length;
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy));
writer.shutdown();
int postCount = dir.listAll().length;
assertTrue(postCount < preCount);
@ -406,7 +403,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir)).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
@ -429,7 +426,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertTrue(lastCommit != null);
// Now add 1 doc and merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy));
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.forceMerge(1);
@ -438,8 +436,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(6, DirectoryReader.listCommits(dir).size());
// Now open writer on the commit just before merge:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy)
.setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Should undo our rollback:
@ -451,8 +450,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(11, r.numDocs());
r.close();
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy)
.setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
writer.shutdown();
@ -468,7 +468,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
r.close();
// Re-merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy));
writer.forceMerge(1);
writer.shutdown();
@ -479,7 +480,8 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Now open writer on the commit just before merging,
// but this time keeping only the last commit:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexCommit(lastCommit));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Reader still sees fully merged index, because writer
@ -512,8 +514,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy())
.setMaxBufferedDocs(10);
@ -526,8 +527,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy);
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy);
mp = conf.getMergePolicy();
mp.setNoCFSRatio(1.0);
writer = new IndexWriter(dir, conf);
@ -563,8 +565,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
for(int j=0;j<N+1;j++) {
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
@ -622,8 +623,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
boolean useCompoundFile = (pass % 2) != 0;
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(new KeepLastNDeletionPolicy(N))
.setMaxBufferedDocs(10);
@ -637,9 +637,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<N+1;i++) {
conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy)
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy)
.setMaxBufferedDocs(10);
mp = conf.getMergePolicy();
mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
@ -664,9 +664,9 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(16, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setIndexDeletionPolicy(policy));
policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
// This will not commit: there are no changes
// pending because we opened for "create":
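
The rollback hunks above repeatedly pair setIndexDeletionPolicy with setIndexCommit; as a hedged sketch (assuming dir plus a policy, such as the test's KeepAllDeletionPolicy, that actually retained the older commit), opening a writer on a past commit looks like:

    IndexCommit lastCommit = null;
    for (IndexCommit commit : DirectoryReader.listCommits(dir)) {
      lastCommit = commit;  // in the test, the commit captured just before the merge
    }
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
        .setIndexDeletionPolicy(policy)
        .setIndexCommit(lastCommit));  // writer now sees the index as of that commit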
View File
@ -124,10 +124,8 @@ public class TestDirectoryReader extends LuceneTestCase {
}
private void addDoc(Random random, Directory ramDir1, String s, boolean create) throws IOException {
IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig(
TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setOpenMode(
create ? OpenMode.CREATE : OpenMode.APPEND));
IndexWriter iw = new IndexWriter(ramDir1, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND));
Document doc = new Document();
doc.add(newTextField("body", s, Field.Store.NO));
iw.addDocument(doc);
@ -136,22 +134,21 @@ public class TestDirectoryReader extends LuceneTestCase {
public void testIsCurrent() throws Exception {
Directory d = newDirectory();
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
addDocumentWithFields(writer);
writer.shutdown();
// set up reader:
DirectoryReader reader = DirectoryReader.open(d);
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.shutdown();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.shutdown();
assertFalse(reader.isCurrent());
@ -168,7 +165,7 @@ public class TestDirectoryReader extends LuceneTestCase {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
newIndexWriterConfig(new MockAnalyzer(random()))
);
Document doc = new Document();
@ -194,9 +191,9 @@ public class TestDirectoryReader extends LuceneTestCase {
// add more documents
writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMergePolicy(newLogMergePolicy())
newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy())
);
// want to get some more segments here
int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
@ -312,8 +309,8 @@ public void testTermVectors() throws Exception {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy())
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy())
);
// want to get some more segments here
// new termvector fields
@ -369,7 +366,8 @@ void assertTermDocsCount(String msg,
Directory dir = newDirectory();
byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
for (int i = 0; i < 10; i++) {
addDoc(writer, "document number " + (i + 1));
@ -378,7 +376,9 @@ void assertTermDocsCount(String msg,
addDocumentWithTermVectorFields(writer);
}
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
doc.add(new StoredField("bin1", bin));
doc.add(new TextField("junk", "junk text", Field.Store.NO));
@ -400,7 +400,9 @@ void assertTermDocsCount(String msg,
// force merge
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
reader = DirectoryReader.open(dir);
@ -438,7 +440,7 @@ public void testFilesOpenClose() throws IOException {
// Create initial data set
File dirFile = createTempDir("TestIndexReader.testFilesOpenClose");
Directory dir = newFSDirectory(dirFile);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
addDoc(writer, "test");
writer.shutdown();
dir.close();
@ -448,7 +450,8 @@ public void testFilesOpenClose() throws IOException {
dir = newFSDirectory(dirFile);
// Now create the data set again, just as before
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
addDoc(writer, "test");
writer.shutdown();
dir.close();
@ -648,9 +651,9 @@ public void testFilesOpenClose() throws IOException {
// set up writer
IndexWriter writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(10))
);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
@ -668,10 +671,10 @@ public void testFilesOpenClose() throws IOException {
// Change the index
writer = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(10))
);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
@ -683,9 +686,8 @@ public void testFilesOpenClose() throws IOException {
assertFalse(r2.getIndexCommit().getSegmentCount() == 1);
r2.close();
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
@ -730,9 +732,8 @@ public void testFilesOpenClose() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
@ -754,7 +755,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-1586: getUniqueTermCount
public void testUniqueTermCount() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a b c d e f g h i j k l m n o p q r s t u v w x y z", Field.Store.NO));
doc.add(newTextField("number", "0 1 2 3 4 5 6 7 8 9", Field.Store.NO));
@ -784,8 +785,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-2046
public void testPrepareCommitIsCurrent() throws Throwable {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.commit();
Document doc = new Document();
writer.addDocument(doc);
@ -806,7 +806,7 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-2753
public void testListCommits() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null)
.setIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
writer.addDocument(new Document());
@ -831,7 +831,7 @@ public void testFilesOpenClose() throws IOException {
// dict cache
public void testTotalTermFreqCached() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newTextField("f", "a a b", Field.Store.NO));
writer.addDocument(d);
@ -851,7 +851,7 @@ public void testFilesOpenClose() throws IOException {
public void testGetSumDocFreq() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newTextField("f", "a", Field.Store.NO));
writer.addDocument(d);
@ -872,7 +872,7 @@ public void testFilesOpenClose() throws IOException {
public void testGetDocCount() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newTextField("f", "a", Field.Store.NO));
writer.addDocument(d);
@ -893,7 +893,7 @@ public void testFilesOpenClose() throws IOException {
public void testGetSumTotalTermFreq() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newTextField("f", "a b b", Field.Store.NO));
writer.addDocument(d);
@ -915,7 +915,8 @@ public void testFilesOpenClose() throws IOException {
// LUCENE-2474
public void testReaderFinishedListener() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
((LogMergePolicy) writer.getConfig().getMergePolicy()).setMergeFactor(3);
writer.addDocument(new Document());
writer.commit();
@ -949,7 +950,7 @@ public void testFilesOpenClose() throws IOException {
public void testOOBDocID() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
DirectoryReader r = writer.getReader();
writer.shutdown();
@ -966,7 +967,7 @@ public void testFilesOpenClose() throws IOException {
public void testTryIncRef() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.commit();
DirectoryReader r = DirectoryReader.open(dir);
@ -980,7 +981,7 @@ public void testFilesOpenClose() throws IOException {
public void testStressTryIncRef() throws IOException, InterruptedException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.commit();
DirectoryReader r = DirectoryReader.open(dir);
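
Several hunks above exercise DirectoryReader.isCurrent(); as a rough sketch (dir and writer as in those tests), the reader goes stale once the writer commits again, and openIfChanged hands back a fresh reader or null:

    DirectoryReader reader = DirectoryReader.open(dir);
    writer.addDocument(new Document());
    writer.commit();
    assertFalse(reader.isCurrent());
    DirectoryReader newer = DirectoryReader.openIfChanged(reader);  // null when nothing changed
    if (newer != null) {
      reader.close();
      reader = newer;
    }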
View File
@ -102,9 +102,10 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
private void doTestReopenWithCommit (Random random, Directory dir, boolean withReopen) throws IOException {
IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(
OpenMode.CREATE).setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(newLogMergePolicy()));
IndexWriter iwriter = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.CREATE)
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(newLogMergePolicy()));
iwriter.commit();
DirectoryReader reader = DirectoryReader.open(dir);
try {
@ -202,8 +203,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
final Directory dir = newDirectory();
// NOTE: this also controls the number of threads!
final int n = TestUtil.nextInt(random(), 20, 40);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}
@ -548,10 +548,10 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setIndexDeletionPolicy(new KeepAllCommits()).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new KeepAllCommits())
.setMaxBufferedDocs(-1)
.setMergePolicy(newLogMergePolicy(10))
);
for(int i=0;i<4;i++) {
Document doc = new Document();
@ -604,7 +604,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
Directory dir = newDirectory();
// Can't use RIW because it randomly commits:
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("field", "value", Field.Store.NO));
w.addDocument(doc);
View File
@ -123,7 +123,7 @@ public class TestDoc extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
@ -165,7 +165,7 @@ public class TestDoc extends LuceneTestCase {
writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(10))
View File
@ -193,7 +193,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testDocValuesUnstored() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwconfig = newIndexWriterConfig(new MockAnalyzer(random()));
iwconfig.setMergePolicy(newLogMergePolicy());
IndexWriter writer = new IndexWriter(dir, iwconfig);
for (int i = 0; i < 50; i++) {
@ -223,7 +223,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Same field in one document as different types:
public void testMixedTypesSameDocument() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
w.addDocument(new Document());
Document doc = new Document();
@ -244,7 +244,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Two documents with same field as different types:
public void testMixedTypesDifferentDocuments() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
@ -268,7 +268,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -298,7 +298,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -326,7 +326,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -352,7 +352,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -383,7 +383,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -411,7 +411,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Two documents across segments
public void testMixedTypesDifferentSegments() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
@ -431,7 +431,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Add inconsistent document after deleteAll
public void testMixedTypesAfterDeleteAll() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
@ -447,13 +447,13 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Add inconsistent document after reopening IW w/ create
public void testMixedTypesAfterReopenCreate() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
w.shutdown();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
w = new IndexWriter(dir, iwc);
doc = new Document();
@ -467,7 +467,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// from separate threads:
public void testMixedTypesDifferentThreads() throws Exception {
Directory dir = newDirectory();
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
final CountDownLatch startingGun = new CountDownLatch(1);
final AtomicBoolean hitExc = new AtomicBoolean();
@ -514,14 +514,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// Adding documents via addIndexes
public void testMixedTypesViaAddIndexes() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
// Make 2nd index w/ inconsistent field
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
w2.addDocument(doc);
@ -548,7 +548,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testIllegalTypeChange() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -570,14 +570,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testIllegalTypeChangeAcrossSegments() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
@ -593,14 +593,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeAfterCloseAndDeleteAll() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.deleteAll();
doc = new Document();
@ -612,7 +612,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeAfterDeleteAll() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -627,7 +627,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeAfterCommitAndDeleteAll() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -643,13 +643,13 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeAfterOpenCreate() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
writer = new IndexWriter(dir, conf);
doc = new Document();
@ -661,7 +661,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeViaAddIndexes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -669,7 +669,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
@ -688,7 +688,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeViaAddIndexesIR() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -696,7 +696,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
@ -717,7 +717,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeViaAddIndexes2() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -725,7 +725,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
writer.addIndexes(dir);
doc = new Document();
@ -743,7 +743,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testTypeChangeViaAddIndexesIR2() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -751,7 +751,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
IndexReader[] readers = new IndexReader[] {DirectoryReader.open(dir)};
writer.addIndexes(readers);
@ -771,7 +771,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
public void testDocsWithField() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
@ -800,7 +800,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// globalFieldNumbers.docValuesType map if the field existed, resulting in
// potentially adding the same field with different DV types.
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
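
These type-change tests all assert the same invariant; a condensed, illustrative sketch (assuming the usual dir) of what they expect, namely that reusing a doc-values field name with a different type is rejected:

    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
    Document doc = new Document();
    doc.add(new NumericDocValuesField("dv", 0L));
    w.addDocument(doc);
    doc = new Document();
    doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
    try {
      w.addDocument(doc);  // same field name, different DocValues type
      fail("did not hit expected exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }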
View File
@ -47,7 +47,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testPositionsSimple() throws IOException {
Directory directory = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 39; i++) {
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@ -111,7 +111,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testRandomPositions() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(47);
int max = 1051;
int term = random().nextInt(max);
@ -194,7 +195,8 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testRandomDocs() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
int numDocs = atLeast(49);
int max = 15678;
int term = random().nextInt(max);
@ -273,7 +275,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
public void testLargeNumberOfPositions() throws IOException {
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
int howMany = 1000;
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setOmitNorms(true);
View File
@ -58,7 +58,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testAddDocument() throws Exception {
Document testDoc = new Document();
DocHelper.setupDoc(testDoc);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(testDoc);
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
@ -115,7 +115,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
doc.add(newTextField("repeated", "repeated one", Field.Store.YES));
@ -188,7 +188,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
};
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
doc.add(newTextField("f1", "a 5 a a", Field.Store.YES));
@ -214,8 +214,7 @@ public class TestDocumentWriter extends LuceneTestCase {
public void testPreAnalyzedField() throws IOException {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new TextField("preanalyzed", new TokenStream() {
@ -281,8 +280,7 @@ public class TestDocumentWriter extends LuceneTestCase {
doc.add(f);
doc.add(newField("f2", "v2", customType2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(doc);
writer.forceMerge(1); // be sure to have a single segment
writer.shutdown();
View File
@ -71,12 +71,12 @@ public class TestDuelingCodecs extends LuceneTestCase {
// but these can be different
// TODO: this turns this into a really big test of Multi*, is that what we want?
IndexWriterConfig leftConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, leftAnalyzer);
IndexWriterConfig leftConfig = newIndexWriterConfig(leftAnalyzer);
leftConfig.setCodec(leftCodec);
// preserve docids
leftConfig.setMergePolicy(newLogMergePolicy());
IndexWriterConfig rightConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, rightAnalyzer);
IndexWriterConfig rightConfig = newIndexWriterConfig(rightAnalyzer);
rightConfig.setCodec(rightCodec);
// preserve docids
rightConfig.setMergePolicy(newLogMergePolicy());
View File
@ -52,7 +52,8 @@ public class TestFieldsReader extends LuceneTestCase {
fieldInfos.addOrUpdate(field.name(), field.fieldType());
}
dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy());
conf.getMergePolicy().setNoCFSRatio(0.0);
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(testDoc);
@ -197,8 +198,8 @@ public class TestFieldsReader extends LuceneTestCase {
try {
Directory dir = new FaultyFSDirectory(indexDir);
IndexWriterConfig iwc = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE);
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE);
IndexWriter writer = new IndexWriter(dir, iwc);
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
View File
@ -116,7 +116,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
public void testFilterIndexReader() throws Exception {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())));
Document d1 = new Document();
d1.add(newTextField("default", "one two", Field.Store.YES));
@ -137,7 +137,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
// We mess with the postings so this can fail:
((BaseDirectoryWrapper) target).setCrossCheckTermVectorsOnClose(false);
writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader reader = new TestReader(DirectoryReader.open(directory));
writer.addIndexes(reader);
writer.shutdown();
View File
@ -64,8 +64,8 @@ public class TestFlex extends LuceneTestCase {
public void testTermOrd() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())));
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())));
Document doc = new Document();
doc.add(newTextField("f", "a b c", Field.Store.NO));
w.addDocument(doc);
View File
@ -68,8 +68,8 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
analyzer).setFlushPolicy(flushPolicy);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer)
.setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new DocumentsWriterPerThreadPool(
numDWPT);
@ -124,8 +124,8 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
Directory dir = newDirectory();
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setFlushPolicy(flushPolicy);
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
.setFlushPolicy(flushPolicy);
final int numDWPT = 1 + atLeast(2);
DocumentsWriterPerThreadPool threadPool = new DocumentsWriterPerThreadPool(
@ -173,8 +173,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
final int numDocumentsToIndex = 50 + atLeast(70);
AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
iwc.setFlushPolicy(flushPolicy);
@ -238,8 +237,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
MockDirectoryWrapper dir = newMockDirectory();
// mock a very slow harddisk sometimes here so that flushing is very slow
dir.setThrottling(MockDirectoryWrapper.Throttling.SOMETIMES);
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
iwc.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
FlushPolicy flushPolicy = new FlushByRamOrCountsPolicy();
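
As a sketch of the flush wiring these tests rely on (values here are illustrative), flushing by RAM means explicitly disabling the document-count and delete-term triggers:

    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
    iwc.setRAMBufferSizeMB(16.0);                                   // flush once buffered RAM hits 16 MB
    iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);   // no doc-count trigger
    iwc.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
    iwc.setFlushPolicy(new FlushByRamOrCountsPolicy());
    IndexWriter w = new IndexWriter(dir, iwc);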
View File
@ -42,7 +42,9 @@ public class TestForTooMuchCloning extends LuceneTestCase {
final TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setMaxMergeAtOnce(2);
final RandomIndexWriter w = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(tmp));
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(tmp));
final int numDocs = 20;
for(int docs=0;docs<numDocs;docs++) {
StringBuilder sb = new StringBuilder();
View File
@ -57,7 +57,7 @@ public class TestForceMergeForever extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
final MyIndexWriter w = new MyIndexWriter(d, newIndexWriterConfig(analyzer));
// Try to make an index that requires merging:
w.getConfig().setMaxBufferedDocs(TestUtil.nextInt(random(), 2, 11));
View File
@ -52,9 +52,9 @@ public class TestIndexFileDeleter extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(mergePolicy).setUseCompoundFile(true)
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(mergePolicy).setUseCompoundFile(true)
);
int i;
@ -71,8 +71,9 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// Delete one doc so we get a .del file:
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(NoMergePolicy.INSTANCE).setUseCompoundFile(true)
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE)
.setUseCompoundFile(true)
);
Term searchTerm = new Term("id", "7");
writer.deleteDocuments(searchTerm);
@ -124,7 +125,8 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// Open & close a writer: it should delete the above 4
// files and nothing more:
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.shutdown();
String[] files2 = dir.listAll();
View File
@ -104,7 +104,7 @@ public class TestIndexWriter extends LuceneTestCase {
try {
IndexWriterConfig.setDefaultWriteLockTimeout(2000);
assertEquals(2000, IndexWriterConfig.getDefaultWriteLockTimeout());
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
} finally {
IndexWriterConfig.setDefaultWriteLockTimeout(savedWriteLockTimeout);
}
@ -117,7 +117,8 @@ public class TestIndexWriter extends LuceneTestCase {
writer.shutdown();
// delete 40 documents
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
for (i = 0; i < 40; i++) {
writer.deleteDocuments(new Term("id", ""+i));
}
@ -128,7 +129,7 @@ public class TestIndexWriter extends LuceneTestCase {
reader.close();
// merge the index down and check that the new doc count is correct
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
assertEquals(60, writer.numDocs());
writer.forceMerge(1);
assertEquals(60, writer.maxDoc());
@ -143,7 +144,8 @@ public class TestIndexWriter extends LuceneTestCase {
// make sure opening a new index for create over
// this existing one works correctly:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
assertEquals(0, writer.maxDoc());
assertEquals(0, writer.numDocs());
writer.shutdown();
@ -198,7 +200,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
// add one document & close writer
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
addDoc(writer);
writer.shutdown();
@ -207,7 +209,8 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("should be one document", reader.numDocs(), 1);
// now open index for create:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
assertEquals("should be zero documents", writer.maxDoc(), 0);
addDoc(writer);
writer.shutdown();
@ -226,7 +229,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
addDoc(writer);
// close
@ -244,7 +247,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexNoDocuments() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.commit();
writer.shutdown();
@ -253,7 +256,8 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(0, reader.numDocs());
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.commit();
writer.shutdown();
@ -266,7 +270,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testManyFields() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10));
for(int j=0;j<100;j++) {
Document doc = new Document();
doc.add(newField("a"+j, "aaa" + j, storedTextType));
@ -298,9 +303,9 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setRAMBufferSizeMB(0.000001).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.000001)
.setMergePolicy(newLogMergePolicy(10))
);
int lastNumFile = dir.listAll().length;
for(int j=0;j<9;j++) {
@ -321,7 +326,7 @@ public class TestIndexWriter extends LuceneTestCase {
// maxBufferedDocs in a write session
public void testChangingRAMBuffer() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.getConfig().setMaxBufferedDocs(10);
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -375,7 +380,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testChangingRAMBuffer2() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.getConfig().setMaxBufferedDocs(10);
writer.getConfig().setMaxBufferedDeleteTerms(10);
writer.getConfig().setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -436,7 +441,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDiverseDocs() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.5));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.5));
int n = atLeast(1);
for(int i=0;i<n;i++) {
// First, docs where every term is unique (heavy on
@ -485,7 +491,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEnablingNorms() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10));
// Enable norms for only 1 doc, pre flush
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setOmitNorms(true);
@ -511,8 +518,8 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals(10, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, post flush
for(int j=0;j<27;j++) {
Document doc = new Document();
@ -541,8 +548,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testHighFreqTerm() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024*1024);
for(int i=0;i<4096;i++) {
@ -598,8 +605,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
Directory dir = new MyRAMDirectory(new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 100; i++) {
addDoc(writer);
}
@ -611,7 +617,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertEquals("did not get right number of hits", 100, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
writer.shutdown();
dir.close();
@ -621,9 +627,9 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(10))
);
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -647,7 +653,7 @@ public class TestIndexWriter extends LuceneTestCase {
// empty doc (no norms) and flush
public void testEmptyDocAfterFlushingRealDoc() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
@ -676,8 +682,7 @@ public class TestIndexWriter extends LuceneTestCase {
*/
public void testBadSegment() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document document = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
@ -693,9 +698,9 @@ public class TestIndexWriter extends LuceneTestCase {
int pri = Thread.currentThread().getPriority();
try {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(2);
IndexWriter iw = new IndexWriter(dir, conf);
Document document = new Document();
@ -718,7 +723,9 @@ public class TestIndexWriter extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: iter=" + i);
}
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy()));
//LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
//lmp.setMergeFactor(2);
//lmp.setNoCFSRatio(0.0);
@ -747,7 +754,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.shutdown();
if (0 == i % 4) {
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
//LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
//lmp2.setNoCFSRatio(0.0);
writer.forceMerge(1);
@ -761,7 +768,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testUnlimitedMaxFieldLength() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
StringBuilder b = new StringBuilder();
@ -784,7 +791,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1179
public void testEmptyFieldName() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("", "a b c", Field.Store.NO));
writer.addDocument(doc);
@ -794,7 +801,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEmptyFieldNameTerms() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("", "a b c", Field.Store.NO));
writer.addDocument(doc);
@ -812,7 +819,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testEmptyFieldNameWithEmptyTerm() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("", "", Field.Store.NO));
doc.add(newStringField("", "a", Field.Store.NO));
@ -858,7 +865,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-1222
public void testDoBeforeAfterFlush() throws IOException {
Directory dir = newDirectory();
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
MockIndexWriter w = new MockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
doc.add(newField("field", "a field", customType));
@ -902,7 +909,7 @@ public class TestIndexWriter extends LuceneTestCase {
};
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new TextField("field", tokens));
try {
@ -920,8 +927,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setPositionIncrementGap( 100 );
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -957,7 +963,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDeadlock() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -973,7 +980,7 @@ public class TestIndexWriter extends LuceneTestCase {
// index has 2 segments
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
writer2.addDocument(doc);
writer2.shutdown();
@ -1282,7 +1289,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIndexStoreCombos() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
byte[] b = new byte[50];
for(int i=0;i<50;i++)
b[i] = (byte) (i+77);
@ -1364,8 +1371,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNoDocsIndex() throws Throwable {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.shutdown();
@ -1385,8 +1391,9 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(mergePolicy).setUseCompoundFile(true)
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(mergePolicy)
.setUseCompoundFile(true)
);
Document doc = new Document();
doc.add(newTextField("field", "go", Field.Store.NO));
@ -1467,9 +1474,8 @@ public class TestIndexWriter extends LuceneTestCase {
// Validates that iw.deleteUnusedFiles() also deletes unused index commits
// in case a deletion policy which holds onto commits is used.
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())));
SnapshotDeletionPolicy sdp = (SnapshotDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
// First commit
@ -1509,7 +1515,7 @@ public class TestIndexWriter extends LuceneTestCase {
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
// when listAll() was called in IndexFileDeleter.
Directory dir = newFSDirectory(createTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))).shutdown();
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).shutdown();
dir.close();
}
@ -1520,9 +1526,10 @@ public class TestIndexWriter extends LuceneTestCase {
// indexed, flushed (but not committed) and then IW rolls back, then no
// files are left in the Directory.
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()).setUseCompoundFile(false));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy())
.setUseCompoundFile(false));
String[] files = dir.listAll();
// Creating over empty dir should not create any files,
@ -1577,8 +1584,8 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNoSegmentFile() throws IOException {
BaseDirectoryWrapper dir = newDirectory();
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
@ -1588,9 +1595,9 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(newField("c", "val", customType));
w.addDocument(doc);
w.addDocument(doc);
IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)
.setOpenMode(OpenMode.CREATE));
IndexWriter w2 = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setOpenMode(OpenMode.CREATE));
w2.shutdown();
// If we don't do that, the test fails on Windows
@ -1605,7 +1612,9 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNoUnwantedTVFiles() throws Exception {
Directory dir = newDirectory();
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setRAMBufferSizeMB(0.01).setMergePolicy(newLogMergePolicy()));
IndexWriter indexWriter = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.01)
.setMergePolicy(newLogMergePolicy()));
indexWriter.getConfig().getMergePolicy().setNoCFSRatio(0.0);
String BIG="alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg";
@ -1820,7 +1829,8 @@ public class TestIndexWriter extends LuceneTestCase {
RandomIndexWriter w1 = new RandomIndexWriter(random(), d);
w1.deleteAll();
try {
new RandomIndexWriter(random(), d, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setWriteLockTimeout(100));
new RandomIndexWriter(random(), d, newIndexWriterConfig(null)
.setWriteLockTimeout(100));
fail("should not be able to create another writer");
} catch (LockObtainFailedException lofe) {
// expected
@ -1933,8 +1943,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
};
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -1957,7 +1966,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testOtherFiles() throws Throwable {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
iw.addDocument(new Document());
iw.shutdown();
try {
@ -1966,7 +1975,7 @@ public class TestIndexWriter extends LuceneTestCase {
out.writeByte((byte) 42);
out.close();
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))).shutdown();
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).shutdown();
assertTrue(slowFileExists(dir, "myrandomfile"));
} finally {
@ -2082,7 +2091,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-4575
public void testCommitWithUserDataOnly() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null));
writer.commit(); // first commit to complete IW create transaction.
// this should store the commit data, even though no other changes were made
@ -2124,7 +2133,7 @@ public class TestIndexWriter extends LuceneTestCase {
@Test
public void testGetCommitData() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null));
writer.setCommitData(new HashMap<String,String>() {{
put("key", "value");
}});
@ -2132,7 +2141,8 @@ public class TestIndexWriter extends LuceneTestCase {
writer.shutdown();
// validate that it's also visible when opening a new IndexWriter
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(null)
.setOpenMode(OpenMode.APPEND));
assertEquals("value", writer.getCommitData().get("key"));
writer.shutdown();
@ -2141,7 +2151,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testNullAnalyzer() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig iwConf = newIndexWriterConfig(null);
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
// add 3 good docs
for (int i = 0; i < 3; i++) {
@ -2273,8 +2283,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIterableFieldThrowsException() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
int iters = atLeast(100);
int docCount = 0;
int docId = 0;
@ -2335,8 +2344,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIterableThrowsException() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
int iters = atLeast(100);
int docCount = 0;
int docId = 0;
@ -2388,8 +2396,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testIterableThrowsException2() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
try {
w.addDocuments(new Iterable<Document>() {
@Override
@ -2459,7 +2466,7 @@ public class TestIndexWriter extends LuceneTestCase {
for(int i=0;i<6;i++) {
BaseDirectoryWrapper dir = newDirectory();
dir.createOutput("segments_0", IOContext.DEFAULT).close();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
int mode = i/2;
if (mode == 0) {
iwc.setOpenMode(OpenMode.CREATE);
@ -2504,7 +2511,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testHasUncommittedChanges() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
assertTrue(writer.hasUncommittedChanges()); // this will be true because a commit will create an empty index
Document doc = new Document();
doc.add(newTextField("myfield", "a b c", Field.Store.NO));
@ -2542,7 +2549,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertFalse(writer.hasUncommittedChanges());
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
assertFalse(writer.hasUncommittedChanges());
writer.addDocument(doc);
assertTrue(writer.hasUncommittedChanges());
@ -2553,7 +2560,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testMergeAllDeleted() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
final SetOnce<IndexWriter> iwRef = new SetOnce<>();
iwc.setInfoStream(new RandomIndexWriter.TestPointInfoStream(iwc.getInfoStream(), new RandomIndexWriter.TestPoint() {
@Override
@ -2607,7 +2614,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory directory = newDirectory();
// we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iwriter = new IndexWriter(directory, iwc);
Document doc = new Document();
@ -2627,7 +2634,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDoubleClose() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
w.addDocument(doc);
@ -2639,7 +2646,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testRollbackThenClose() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
w.addDocument(doc);
@ -2651,7 +2658,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testCloseThenRollback() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
w.addDocument(doc);
@ -2665,7 +2672,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
// If version is < 50 IW.close should throw an exception
// on uncommitted changes:
IndexWriterConfig iwc = newIndexWriterConfig(Version.LUCENE_4_8, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
@ -2689,7 +2696,7 @@ public class TestIndexWriter extends LuceneTestCase {
// If version is < 50 IW.close should throw an exception
// on still-running merges:
IndexWriterConfig iwc = newIndexWriterConfig(Version.LUCENE_4_8, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random()));
LogDocMergePolicy mp = new LogDocMergePolicy();
mp.setMergeFactor(2);
iwc.setMergePolicy(mp);
@ -2764,7 +2771,7 @@ public class TestIndexWriter extends LuceneTestCase {
// Allow writing to same file more than once:
dir.setPreventDoubleWrite(false);
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
LogMergePolicy lmp = new LogDocMergePolicy();
lmp.setMergeFactor(2);
iwc.setMergePolicy(lmp);

View File

@ -42,7 +42,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
*/
public void testCommitOnClose() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
@ -57,7 +57,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for(int i=0;i<3;i++) {
for(int j=0;j<11;j++) {
TestIndexWriter.addDoc(writer);
@ -93,7 +93,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
*/
public void testCommitOnCloseAbort() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10));
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
@ -106,8 +107,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals("first number of hits", 14, hits.length);
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10));
for(int j=0;j<17;j++) {
TestIndexWriter.addDoc(writer);
}
@ -133,8 +135,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// Now make sure we can re-open the index, add docs,
// and all is good:
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10));
// On abort, writer in fact may write to the same
// segments_N file:
@ -204,10 +207,10 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(10).
setReaderPooling(false).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(10)
.setReaderPooling(false)
.setMergePolicy(newLogMergePolicy(10))
);
for(int j=0;j<30;j++) {
TestIndexWriter.addDocWithIndex(writer, j);
@ -219,12 +222,12 @@ public class TestIndexWriterCommit extends LuceneTestCase {
long startDiskUsage = dir.getMaxUsedSizeInBytes();
writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setOpenMode(OpenMode.APPEND).
setMaxBufferedDocs(10).
setMergeScheduler(new SerialMergeScheduler()).
setReaderPooling(false).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(analyzer)
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10)
.setMergeScheduler(new SerialMergeScheduler())
.setReaderPooling(false)
.setMergePolicy(newLogMergePolicy(10))
);
for(int j=0;j<1470;j++) {
@ -268,16 +271,17 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy(10))
);
for(int j=0;j<17;j++) {
TestIndexWriter.addDocWithIndex(writer, j);
}
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
// Open a reader before closing (committing) the writer:
@ -302,7 +306,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: do real full merge");
}
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
@ -327,8 +332,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
final int NUM_THREADS = 5;
final double RUN_SEC = 0.5;
final Directory dir = newDirectory();
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
TestUtil.reduceOpenFiles(w.w);
w.commit();
final AtomicBoolean failed = new AtomicBoolean();
@ -383,9 +388,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(5))
);
writer.commit();
@ -420,7 +425,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
public void testFutureCommit() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
Document doc = new Document();
w.addDocument(doc);
@ -447,7 +453,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertNotNull(commit);
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit));
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)
.setIndexCommit(commit));
assertEquals(1, w.numDocs());
@ -476,7 +484,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// changed since LUCENE-2386, where before IW would always commit on a fresh
// new index.
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
try {
DirectoryReader.listCommits(dir);
fail("listCommits should have thrown an exception over empty index");
@ -495,9 +503,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(5))
);
writer.commit();
@ -554,9 +562,9 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(5))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(5))
);
writer.commit();
@ -580,7 +588,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader.close();
reader2.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 17; i++)
TestIndexWriter.addDoc(writer);
@ -606,7 +614,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
public void testPrepareCommitNoChanges() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.prepareCommit();
writer.commit();
writer.shutdown();
@ -620,7 +628,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// LUCENE-1382
public void testCommitUserData() throws IOException {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for(int j=0;j<17;j++)
TestIndexWriter.addDoc(w);
w.shutdown();
@ -630,7 +639,8 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals(0, r.getIndexCommit().getUserData().size());
r.close();
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for(int j=0;j<17;j++)
TestIndexWriter.addDoc(w);
Map<String,String> data = new HashMap<>();
@ -642,7 +652,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals("test1", r.getIndexCommit().getUserData().get("label"));
r.close();
w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
w.forceMerge(1);
w.shutdown();

View File

@ -143,7 +143,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
public void testReuse() throws Exception {
Directory dir = newDirectory();
// test that IWC cannot be reused across two IWs
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig conf = newIndexWriterConfig(null);
new RandomIndexWriter(random(), dir, conf).shutdown();
// this should fail

View File

@ -58,8 +58,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDeleteTerms(1));
FieldType custom1 = new FieldType();
custom1.setStored(true);
@ -97,8 +97,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testNonRAMDelete() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@ -130,8 +130,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
public void testMaxBufferedDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDeleteTerms(1));
writer.addDocument(new Document());
writer.deleteDocuments(new Term("foobar", "1"));
@ -149,8 +149,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
System.out.println("TEST: t=" + t);
}
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(4)
.setMaxBufferedDeleteTerms(4));
int id = 0;
int value = 100;
@ -187,8 +187,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test when delete terms apply to both disk and ram segments
public void testBothDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(100)
.setMaxBufferedDeleteTerms(100));
int id = 0;
@ -221,8 +221,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test that batched delete terms are flushed together
public void testBatchDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -264,8 +264,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll()
public void testDeleteAll() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -373,8 +373,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test rollback of deleteAll()
public void testDeleteAllRollback() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -409,8 +409,8 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// test deleteAll() w/ near real-time reader
public void testDeleteAllNRT() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2)
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2)
.setMaxBufferedDeleteTerms(2));
int id = 0;
@ -499,7 +499,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
MockDirectoryWrapper startDir = newMockDirectory();
// TODO: find the resource leak that only occurs sometimes here.
startDir.setNoDeleteOpenFile(false);
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 157; i++) {
Document d = new Document();
d.add(newStringField("id", Integer.toString(i), Field.Store.YES));
@ -525,8 +525,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
dir.setPreventDoubleWrite(false);
dir.setAllowRandomFileNotFoundException(false);
IndexWriter modifier = new IndexWriter(dir,
newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(1000)
.setMaxBufferedDeleteTerms(1000)
.setMergeScheduler(new ConcurrentMergeScheduler()));
@ -766,8 +765,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy()));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setMaxBufferedDeleteTerms(2)
.setReaderPooling(false)
.setMergePolicy(newLogMergePolicy()));
MergePolicy lmp = modifier.getConfig().getMergePolicy();
lmp.setNoCFSRatio(1.0);
@ -891,7 +892,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
String[] text = { "Amsterdam", "Venice" };
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
modifier.commit();
dir.failOn(failure.reset());
@ -982,7 +983,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, true));
}
};
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setRAMBufferSizeMB(1.0)
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH));
Document doc = new Document();
doc.add(newTextField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO));
int num = atLeast(3);
@ -1023,8 +1027,11 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// ever call commit() for this test:
// note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999)
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false));
newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.1f)
.setMaxBufferedDocs(1000)
.setMergePolicy(NoMergePolicy.INSTANCE)
.setReaderPooling(false));
int count = 0;
while(true) {
Document doc = new Document();
@ -1069,8 +1076,12 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// ever call commit() for this test:
final int flushAtDelCount = atLeast(1020);
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false));
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDeleteTerms(flushAtDelCount)
.setMaxBufferedDocs(1000)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergePolicy(NoMergePolicy.INSTANCE)
.setReaderPooling(false));
int count = 0;
while(true) {
Document doc = new Document();
@ -1110,8 +1121,11 @@ public class TestIndexWriterDelete extends LuceneTestCase {
final AtomicBoolean closing = new AtomicBoolean();
final AtomicBoolean sawAfterFlush = new AtomicBoolean();
IndexWriter w = new IndexWriter(dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false)) {
newIndexWriterConfig(new MockAnalyzer(random()))
.setRAMBufferSizeMB(0.5)
.setMaxBufferedDocs(-1)
.setMergePolicy(NoMergePolicy.INSTANCE)
.setReaderPooling(false)) {
@Override
public void doAfterFlush() {
assertTrue("only " + docsInSegment.get() + " in segment", closing.get() || docsInSegment.get() >= 7);

View File

@ -248,8 +248,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(analyzer)
.setRAMBufferSizeMB(0.1)
.setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
if (VERBOSE) {
@ -291,8 +292,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)
.setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(analyzer)
.setRAMBufferSizeMB(0.2)
.setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1());
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
writer.commit();
@ -372,7 +374,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testExceptionDocumentsWriterInit() throws IOException {
Directory dir = newDirectory();
TestPoint2 testPoint = new TestPoint2();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint);
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
w.addDocument(doc);
@ -390,7 +392,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1208
public void testExceptionJustBeforeFlush() throws IOException {
Directory dir = newDirectory();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2), new TestPoint1());
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir,
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2),
new TestPoint1());
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
w.addDocument(doc);
@ -433,8 +438,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1210
public void testExceptionOnMergeInit() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy());
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
cms.setSuppressExceptions();
conf.setMergeScheduler(cms);
@ -460,7 +466,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// LUCENE-1072
public void testExceptionFromTokenStream() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new Analyzer() {
IndexWriterConfig conf = newIndexWriterConfig(new Analyzer() {
@Override
public TokenStreamComponents createComponents(String fieldName) {
@ -578,7 +584,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
failure.setDoFail();
dir.failOn(failure);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
doc.add(newTextField("content", contents, Field.Store.NO));
@ -615,7 +622,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
System.out.println("TEST: cycle i=" + i);
}
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setMergePolicy(newLogMergePolicy()));
// don't allow a sudden merge to clean up the deleted
// doc below:
@ -669,8 +677,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
analyzer).setMaxBufferedDocs(10));
writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(10));
doc = new Document();
doc.add(newField("contents", "here are some contents", DocCopyIterator.custom5));
for(int j=0;j<17;j++)
@ -712,8 +720,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Directory dir = newDirectory();
{
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(-1)
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(-1)
.setMergePolicy(NoMergePolicy.INSTANCE));
// don't use a merge policy here; they depend on the DWPThreadPool and its max thread states etc.
final int finalI = i;
@ -781,8 +789,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertEquals(NUM_THREAD*NUM_ITER, numDel);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(10));
Document doc = new Document();
doc.add(newField("contents", "here are some contents", DocCopyIterator.custom5));
for(int j=0;j<17;j++)
@ -843,10 +851,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(5))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy(5))
);
failure.setDoFail();
@ -931,8 +939,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for (FailOnlyInCommit failure : failures) {
MockDirectoryWrapper dir = newMockDirectory();
dir.setFailOnCreateOutput(false);
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a field", Field.Store.YES));
w.addDocument(doc);
@ -955,7 +962,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testForceMergeExceptions() throws IOException {
Directory startDir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy());
((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100);
IndexWriter w = new IndexWriter(startDir, conf);
for(int i=0;i<27;i++)
@ -968,7 +977,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
System.out.println("TEST: iter " + i);
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler());
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new ConcurrentMergeScheduler());
((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
w = new IndexWriter(dir, conf);
dir.setRandomIOExceptionRate(0.5);
@ -991,7 +1001,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
final AtomicBoolean thrown = new AtomicBoolean(false);
final Directory dir = newDirectory();
final IndexWriter writer = new IndexWriter(dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setInfoStream(new InfoStream() {
newIndexWriterConfig(new MockAnalyzer(random()))
.setInfoStream(new InfoStream() {
@Override
public void message(String component, final String message) {
if (message.startsWith("now flush at shutdown") && thrown.compareAndSet(false, true)) {
@ -1035,7 +1046,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testRollbackExceptionHang() throws Throwable {
Directory dir = newDirectory();
TestPoint4 testPoint = new TestPoint4();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint);
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint);
addDoc(w);
@ -1058,7 +1069,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1090,7 +1101,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
reader.close();
// should remove the corrupted segments_N
new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)).shutdown();
new IndexWriter(dir, newIndexWriterConfig(null)).shutdown();
dir.close();
}
@ -1103,7 +1114,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1152,8 +1163,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(true)).setUseCompoundFile(true)
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(true))
.setUseCompoundFile(true)
);
MergePolicy lmp = writer.getConfig().getMergePolicy();
// Force creation of CFS:
@ -1206,7 +1218,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
// add 100 documents
for (int i = 0; i < 100; i++) {
@ -1244,7 +1256,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
reader.close();
try {
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
} catch (Exception e) {
e.printStackTrace(System.out);
fail("writer failed to open on a crashed index");
@ -1268,8 +1281,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for (int j = 0; j < num; j++) {
for (FailOnTermVectors failure : failures) {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
dir.failOn(failure);
int numDocs = 10 + random().nextInt(30);
for (int i = 0; i < numDocs; i++) {
@ -1695,12 +1707,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testExceptionOnCtor() throws Exception {
UOEDirectory uoe = new UOEDirectory();
Directory d = new MockDirectoryWrapper(random(), uoe);
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(null));
iw.addDocument(new Document());
iw.shutdown();
uoe.doFail = true;
try {
new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
new IndexWriter(d, newIndexWriterConfig(null));
fail("should have gotten a UOE");
} catch (UnsupportedOperationException expected) {
}
@ -1711,7 +1723,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testIllegalPositions() throws Exception {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
Document doc = new Document();
Token t1 = new Token("foo", 0, 3);
t1.setPositionIncrement(Integer.MAX_VALUE);
@ -1734,7 +1746,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testLegalbutVeryLargePositions() throws Exception {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
Document doc = new Document();
Token t1 = new Token("foo", 0, 3);
t1.setPositionIncrement(Integer.MAX_VALUE-500);
@ -1940,7 +1952,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
System.out.println("\nTEST: iter=" + iter + " numDocs=" + numDocs + " docBase=" + docBase + " delCount=" + deleteCount);
}
if (w == null) {
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
final MergeScheduler ms = iwc.getMergeScheduler();
if (ms instanceof ConcurrentMergeScheduler) {
final ConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeScheduler() {

View File
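
The hunks below all apply the same mechanical substitution; a minimal before/after sketch of the pattern (identifiers and setter values are illustrative, taken from the hunks above):

    // before: the test helper took the Version explicitly
    IndexWriter w = new IndexWriter(dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));

    // after: the Version argument is dropped and chained setters are reflowed one per line
    IndexWriter w = new IndexWriter(dir,
        newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(2));

    // tests that never analyze text pass a null analyzer the same way
    IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));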

@ -90,7 +90,7 @@ public class TestIndexWriterExceptions2 extends LuceneTestCase {
Codec inner = RANDOM_MULTIPLIER > 1 ? Codec.getDefault() : new AssertingCodec();
Codec codec = new CrankyCodec(inner, new Random(random().nextLong()));
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig conf = newIndexWriterConfig(analyzer);
// just for now, try to keep this test reproducible
conf.setMergeScheduler(new SerialMergeScheduler());
conf.setCodec(codec);

View File

@ -40,10 +40,10 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMinMergeDocs(1);
ldmp.setMergeFactor(5);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy(
ldmp));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(2)
.setMergePolicy(ldmp));
for(int j=0;j<numDocs;j++)
writer.addDocument(doc);
writer.shutdown();
@ -54,8 +54,8 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(5);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setMergePolicy(ldmp));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(ldmp));
writer.forceMerge(3);
writer.shutdown();
@ -80,9 +80,10 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMinMergeDocs(1);
ldmp.setMergeFactor(4);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergePolicy(ldmp)
.setMergeScheduler(new ConcurrentMergeScheduler()));
for(int iter=0;iter<10;iter++) {
for(int i=0;i<19;i++)
@ -121,7 +122,9 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
public void testForceMergeTempSpaceUsage() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy()));
if (VERBOSE) {
System.out.println("TEST: config1=" + writer.getConfig());
}
@ -150,7 +153,9 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
dir.resetMaxUsedSizeInBytes();
dir.setTrackDiskUsage(true);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
long maxDiskUsage = dir.getMaxUsedSizeInBytes();
@ -168,10 +173,10 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
for(int pass=0;pass<2;pass++) {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(51))
newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(51))
);
Document doc = new Document();
doc.add(newStringField("field", "aaa", Field.Store.NO));

View File

@ -33,9 +33,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testNormalCase() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++) {
addDoc(writer);
@ -50,9 +50,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testNoOverMerge() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(new LogDocMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(new LogDocMergePolicy()));
boolean noOverMerge = false;
for (int i = 0; i < 100; i++) {
@ -75,9 +75,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
LogDocMergePolicy mp = new LogDocMergePolicy();
mp.setMinMergeDocs(100);
mp.setMergeFactor(10);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(mp));
for (int i = 0; i < 100; i++) {
addDoc(writer);
@ -85,9 +85,10 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
mp = new LogDocMergePolicy();
mp.setMergeFactor(10);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(mp));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10)
.setMergePolicy(mp));
mp.setMinMergeDocs(100);
checkInvariants(writer);
}
@ -102,10 +103,10 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy()).
setMergeScheduler(new SerialMergeScheduler())
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy())
.setMergeScheduler(new SerialMergeScheduler())
);
for (int i = 0; i < 250; i++) {
@ -130,9 +131,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
public void testMaxBufferedDocsChange() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(101)
.setMergePolicy(new LogDocMergePolicy())
.setMergeScheduler(new SerialMergeScheduler()));
// leftmost* segment has 1 doc
@ -144,18 +145,21 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
}
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(101).setMergePolicy(new LogDocMergePolicy())
.setMergeScheduler(new SerialMergeScheduler()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(101)
.setMergePolicy(new LogDocMergePolicy())
.setMergeScheduler(new SerialMergeScheduler()));
}
writer.shutdown();
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(10);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new SerialMergeScheduler()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10)
.setMergePolicy(ldmp)
.setMergeScheduler(new SerialMergeScheduler()));
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@ -182,9 +186,9 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
LogDocMergePolicy ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(100);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(ldmp));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(ldmp));
for (int i = 0; i < 250; i++) {
addDoc(writer);
@ -195,17 +199,19 @@ public class TestIndexWriterMergePolicy extends LuceneTestCase {
// delete some docs without merging
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(NoMergePolicy.INSTANCE)
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE)
);
writer.deleteDocuments(new Term("content", "aaa"));
writer.shutdown();
ldmp = new LogDocMergePolicy();
ldmp.setMergeFactor(5);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(10).setMergePolicy(ldmp).setMergeScheduler(new ConcurrentMergeScheduler()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(10)
.setMergePolicy(ldmp)
.setMergeScheduler(new ConcurrentMergeScheduler()));
// merge factor is changed, so check invariants after all adds
for (int i = 0; i < 10; i++) {

View File

@ -62,8 +62,8 @@ public class TestIndexWriterMerging extends LuceneTestCase
IndexWriter writer = new IndexWriter(
merged,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(2))
);
writer.addIndexes(indexA, indexB);
writer.forceMerge(1);
@ -102,10 +102,10 @@ public class TestIndexWriterMerging extends LuceneTestCase
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy(2))
);
for (int i = start; i < (start + numDocs); i++)
@ -122,10 +122,9 @@ public class TestIndexWriterMerging extends LuceneTestCase
// are required
public void testForceMergeDeletes() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH));
Document document = new Document();
FieldType customType = new FieldType();
@ -165,7 +164,8 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(8, ir.numDocs());
ir.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
assertEquals(8, writer.numDocs());
assertEquals(10, writer.maxDoc());
writer.forceMergeDeletes();
@ -183,10 +183,10 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
setMergePolicy(newLogMergePolicy(50))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergePolicy(newLogMergePolicy(50))
);
Document document = new Document();
@ -231,8 +231,8 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(3))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(3))
);
assertEquals(49, writer.numDocs());
writer.forceMergeDeletes();
@ -250,10 +250,10 @@ public class TestIndexWriterMerging extends LuceneTestCase
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).
setMergePolicy(newLogMergePolicy(50))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergePolicy(newLogMergePolicy(50))
);
FieldType customType = new FieldType();
@ -296,8 +296,8 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(3))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(3))
);
writer.forceMergeDeletes(false);
writer.shutdown();
@ -333,9 +333,10 @@ public class TestIndexWriterMerging extends LuceneTestCase
// LUCENE-1013
public void testSetMaxMergeDocs() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy());
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new MyMergeScheduler())
.setMaxBufferedDocs(2)
.setMergePolicy(newLogMergePolicy());
LogMergePolicy lmp = (LogMergePolicy) conf.getMergePolicy();
lmp.setMaxMergeDocs(20);
lmp.setMergeFactor(2);
@ -371,8 +372,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
System.out.println("TEST: pass=" + pass);
}
IndexWriterConfig conf = newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy());
@ -447,7 +447,9 @@ public class TestIndexWriterMerging extends LuceneTestCase
reader.close();
// Reopen
writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
}
writer.shutdown();
}

View File

@ -38,8 +38,7 @@ public class TestIndexWriterNRTIsCurrent extends LuceneTestCase {
public void testIsCurrentWithThreads() throws
IOException, InterruptedException {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
ReaderHolder holder = new ReaderHolder();
ReaderThread[] threads = new ReaderThread[atLeast(3)];

View File

@ -63,7 +63,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory());
dir.setMaxSizeInBytes(diskFree);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
MergeScheduler ms = writer.getConfig().getMergeScheduler();
if (ms instanceof ConcurrentMergeScheduler) {
// This test intentionally produces exceptions
@ -175,7 +175,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
long inputDiskUsage = 0;
for(int i=0;i<NUM_DIR;i++) {
dirs[i] = newDirectory();
IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dirs[i], newIndexWriterConfig(new MockAnalyzer(random())));
for(int j=0;j<25;j++) {
addDocWithIndex(writer, 25*i+j);
}
@ -189,7 +189,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Now, build a starting index that has START_COUNT docs. We
// will then try to addIndexes into a copy of this:
MockDirectoryWrapper startDir = newMockDirectory();
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(new MockAnalyzer(random())));
for(int j=0;j<START_COUNT;j++) {
addDocWithIndex(writer, j);
}
@ -255,7 +255,9 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Make a new dir that will enforce disk usage:
MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy(false)));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy(false)));
IOException err = null;
MergeScheduler ms = writer.getConfig().getMergeScheduler();
@ -490,10 +492,10 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
//IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setReaderPooling(true));
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergeScheduler(new SerialMergeScheduler()).
setReaderPooling(true).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new SerialMergeScheduler())
.setReaderPooling(true)
.setMergePolicy(newLogMergePolicy(2))
);
// we can do this because we add/delete/add (and don't merge to "nothing")
w.setKeepFullyDeletedSegments(true);
@ -532,8 +534,9 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// OK:
public void testImmediateDiskFull() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergeScheduler(new ConcurrentMergeScheduler()));
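// cap the directory at its current size so the very next write hits the disk-full limit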
dir.setMaxSizeInBytes(Math.max(1, dir.getRecomputedActualSizeInBytes()));
final Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);

View File

@ -53,7 +53,7 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
try {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
if (VERBOSE) {
// Do this ourselves instead of relying on LTC so
@ -143,7 +143,7 @@ public class TestIndexWriterOutOfFileDescriptors extends LuceneTestCase {
// files ... we can easily have leftover files at
// the time we take a copy because we are holding
// open a reader:
new IndexWriter(dirCopy, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))).shutdown();
new IndexWriter(dirCopy, newIndexWriterConfig(new MockAnalyzer(random()))).shutdown();
dirCopy.setRandomIOExceptionRate(rate);
dir.setRandomIOExceptionRateOnOpen(rate);
}

View File

@ -89,7 +89,7 @@ public class TestIndexWriterOutOfMemory extends LuceneTestCase {
dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
dir.setUseSlowOpenClosers(false);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig conf = newIndexWriterConfig(analyzer);
// just for now, try to keep this test reproducible
conf.setMergeScheduler(new SerialMergeScheduler());

View File

@ -70,7 +70,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Can't use assertNoDeletes: this test pulls a non-NRT
// reader in the end:
Directory dir1 = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir1, iwc);
for (int i = 0; i < 97 ; i++) {
@ -104,7 +104,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertTrue(reader.isCurrent());
writer.shutdown();
assertTrue(reader.isCurrent()); // all changes are visible to the reader
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir1, iwc);
assertTrue(reader.isCurrent());
writer.addDocument(DocHelper.createDocument(1, "x", 1+random().nextInt(5)));
@ -119,7 +119,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = true;
Directory dir1 = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
if (iwc.getMaxBufferedDocs() < 20) {
iwc.setMaxBufferedDocs(20);
}
@ -166,7 +166,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
assertEquals(0, count(new Term("id", id10), r3));
assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a b c", Field.Store.NO));
writer.addDocument(doc);
@ -186,7 +186,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testIsCurrent() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -194,7 +194,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.addDocument(doc);
writer.shutdown();
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, iwc);
doc = new Document();
doc.add(newTextField("field", "a b c", Field.Store.NO));
@ -229,7 +229,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = false;
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
if (iwc.getMaxBufferedDocs() < 20) {
iwc.setMaxBufferedDocs(20);
}
@ -243,7 +243,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// create a 2nd index
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
createIndexNoClose(!doFullMerge, "index2", writer2);
writer2.shutdown();
@ -280,11 +280,11 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = false;
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
// create a 2nd index
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
createIndexNoClose(!doFullMerge, "index2", writer2);
writer2.shutdown();
@ -310,7 +310,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
boolean doFullMerge = true;
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
// create the index
createIndexNoClose(!doFullMerge, "index1", writer);
writer.flush(false, true);
@ -347,7 +347,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.shutdown();
// reopen the writer to verify the delete made it to the directory
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader w2r1 = writer.getReader();
assertEquals(0, count(new Term("id", id10), w2r1));
w2r1.close();
@ -361,7 +361,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory mainDir = getAssertNoDeletesDirectory(newDirectory());
IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
TestUtil.reduceOpenFiles(mainWriter);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
@ -404,7 +405,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
this.numDirs = numDirs;
this.mainWriter = mainWriter;
addDir = newDirectory();
IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
IndexWriter writer = new IndexWriter(addDir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
TestUtil.reduceOpenFiles(writer);
for (int i = 0; i < NUM_INIT_DOCS; i++) {
Document doc = DocHelper.createDocument(i, "addindex", 4);
@ -516,7 +518,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
*/
public void doTestIndexWriterReopenSegment(boolean doFullMerge) throws Exception {
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
createIndexNoClose(false, "index1", writer);
@ -552,7 +554,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.shutdown();
// test whether the changes made it to the directory
writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader w2r1 = writer.getReader();
// ensure the deletes were actually flushed to the directory
assertEquals(200, w2r1.maxDoc());
@ -610,11 +612,11 @@ public class TestIndexWriterReader extends LuceneTestCase {
MyWarmer warmer = new MyWarmer();
IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergedSegmentWarmer(warmer).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy())
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergedSegmentWarmer(warmer)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy())
);
// create the index
@ -645,7 +647,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testAfterCommit() throws Exception {
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new ConcurrentMergeScheduler()));
writer.commit();
// create the index
@ -677,7 +680,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
// Make sure reader remains usable even if IndexWriter closes
public void testAfterClose() throws Exception {
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())));
// create the index
createIndexNoClose(false, "test", writer);
@ -707,8 +710,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory dir1 = getAssertNoDeletesDirectory(newDirectory());
final IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(2))
);
// create the index
@ -801,8 +804,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory dir1 = newDirectory();
final IndexWriter writer = new IndexWriter(
dir1,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy(2))
);
// create the index
@ -881,7 +884,8 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testForceMergeDeletes() throws Throwable {
Directory dir = newDirectory();
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
Document doc = new Document();
doc.add(newTextField("field", "a b c", Field.Store.NO));
Field id = newStringField("id", "", Field.Store.NO);
@ -905,7 +909,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testDeletesNumDocs() throws Throwable {
Directory dir = newDirectory();
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a b c", Field.Store.NO));
Field id = newStringField("id", "", Field.Store.NO);
@ -935,7 +939,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
public void testEmptyIndex() throws Exception {
// Ensures that getReader works on an empty index, which hasn't been committed yet.
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader r = w.getReader();
assertEquals(0, r.numDocs());
r.close();
@ -948,7 +952,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
final AtomicBoolean didWarm = new AtomicBoolean();
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setReaderPooling(true).
setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
@ -995,12 +999,12 @@ public class TestIndexWriterReader extends LuceneTestCase {
};
IndexWriter w = new IndexWriter(
dir,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setReaderPooling(true).
setInfoStream(infoStream).
setMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream)).
setMergePolicy(newLogMergePolicy(10))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setReaderPooling(true)
.setInfoStream(infoStream)
.setMergedSegmentWarmer(new SimpleMergedSegmentWarmer(infoStream))
.setMergePolicy(newLogMergePolicy(10))
);
Document doc = new Document();
@ -1018,7 +1022,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
Directory d = getAssertNoDeletesDirectory(newDirectory());
IndexWriter w = new IndexWriter(
d,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
DirectoryReader r = w.getReader(); // start pooling readers
@ -1075,7 +1079,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
}
});
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges from getting in the way
IndexWriter writer = new IndexWriter(dir, conf);

View File

@ -233,7 +233,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
public void testEmbeddedFFFF() throws Throwable {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newTextField("field", "a a\uffffb", Field.Store.NO));
w.addDocument(doc);
@ -250,7 +250,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
// LUCENE-510
public void testInvalidUTF16() throws Throwable {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new TestIndexWriter.StringSplitAnalyzer()));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new TestIndexWriter.StringSplitAnalyzer()));
Document doc = new Document();
final int count = utf8Data.length/2;

View File

@ -137,10 +137,10 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy(4))
);
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
dir.setMaxSizeInBytes(4*1024+20*iter);
@ -184,10 +184,10 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy(4))
);
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
@ -259,10 +259,10 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(4))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergeScheduler(new ConcurrentMergeScheduler())
.setMergePolicy(newLogMergePolicy(4))
);
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
@ -317,8 +317,9 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
public void _testSingleThreadFailure(MockDirectoryWrapper.Failure failure) throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setMergeScheduler(new ConcurrentMergeScheduler()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setMergeScheduler(new ConcurrentMergeScheduler()));
final Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
@ -523,8 +524,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
Document doc = new Document();
Field field = newTextField("field", "testData", Field.Store.YES);
doc.add(field);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
iwConstructed.countDown();
startIndexing.await();
writer.addDocument(doc);
@ -551,7 +551,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
writerRef.set(new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)));
writerRef.set(new IndexWriter(d, newIndexWriterConfig(analyzer)));
final LineFileDocs docs = new LineFileDocs(random());
final Thread[] threads = new Thread[threadCount];
final int iters = atLeast(100);
@ -577,7 +577,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: " + Thread.currentThread().getName() + ": rollback done; now open new writer");
}
writerRef.set(new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))));
writerRef.set(new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))));
} finally {
rollbackLock.unlock();
}

View File

@ -77,9 +77,9 @@ public class TestLazyProxSkipping extends LuceneTestCase {
// note: test explicitly disables payloads
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(false))
newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy(false))
);
for (int i = 0; i < numDocs; i++) {
@ -144,7 +144,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
public void testSeek() throws IOException {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(newTextField(this.field, "a b", Field.Store.YES));

View File

@ -109,7 +109,7 @@ public class TestLongPostings extends LuceneTestCase {
}
final IndexReader r;
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
final IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
.setMergePolicy(newLogMergePolicy());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random().nextDouble());
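// randomizes the RAM buffer between 16 and 32 MB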
@ -307,7 +307,7 @@ public class TestLongPostings extends LuceneTestCase {
final IndexReader r;
if (true) {
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
final IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(IndexWriterConfig.OpenMode.CREATE)
.setMergePolicy(newLogMergePolicy());
iwc.setRAMBufferSizeMB(16.0 + 16.0 * random().nextDouble());

View File

@ -45,8 +45,8 @@ public class TestMaxTermFrequency extends LuceneTestCase {
public void setUp() throws Exception {
super.setUp();
dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)).setMergePolicy(newLogMergePolicy());
IndexWriterConfig config = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true))
.setMergePolicy(newLogMergePolicy());
config.setSimilarity(new TestSimilarity());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
Document doc = new Document();

View File

@ -45,7 +45,7 @@ public class TestMixedCodecs extends LuceneTestCase {
System.out.println("TEST: " + docUpto + " of " + NUM_DOCS);
}
if (docsLeftInThisSegment == 0) {
final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
final IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
if (random().nextBoolean()) {
// Make sure we aggressively mix in SimpleText
// since it has different impls for all codec

View File

@ -46,7 +46,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
public void testManyReopensAndFields() throws Exception {
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
LogMergePolicy lmp = newLogMergePolicy();
lmp.setMergeFactor(3); // merge often
conf.setMergePolicy(lmp);
@ -155,7 +155,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
public void testStressMultiThreading() throws Exception {
final Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
final IndexWriter writer = new IndexWriter(dir, conf);
// create index
@ -291,7 +291,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
public void testUpdateDifferentDocsInDifferentGens() throws Exception {
// update same document multiple times across generations
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(4);
IndexWriter writer = new IndexWriter(dir, conf);
final int numDocs = atLeast(10);
@ -330,7 +330,7 @@ public class TestMixedDocValuesUpdates extends LuceneTestCase {
// LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
IndexWriter writer = new IndexWriter(dir, conf);

View File

@ -36,7 +36,8 @@ public class TestMultiFields extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
// we can do this because we use NoMergePolicy (and don't merge to "nothing")
w.setKeepFullyDeletedSegments(true);
@ -155,7 +156,7 @@ public class TestMultiFields extends LuceneTestCase {
public void testSeparateEnums() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newStringField("f", "j", Field.Store.NO));
w.addDocument(d);
@ -173,7 +174,7 @@ public class TestMultiFields extends LuceneTestCase {
public void testTermDocsEnum() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
d.add(newStringField("f", "j", Field.Store.NO));
w.addDocument(d);

View File

@ -68,7 +68,9 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
public void testSimpleSkip() throws IOException {
Directory dir = new CountingRAMDirectory(new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new PayloadAnalyzer()).setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new PayloadAnalyzer())
.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()))
.setMergePolicy(newLogMergePolicy()));
Term term = new Term("test", "a");
for (int i = 0; i < 5000; i++) {
Document d1 = new Document();

View File

@ -36,9 +36,9 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
}
IndexWriter writer = new IndexWriter(
mainDir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(false,2))
newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy(false,2))
);
IndexReader reader = writer.getReader(); // start pooling readers
reader.close();

View File

@ -46,9 +46,8 @@ public class TestNeverDelete extends LuceneTestCase {
}
final RandomIndexWriter w = new RandomIndexWriter(random(),
d,
newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
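// NoDeletionPolicy retains every commit point, so no segments_N file is ever removed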
w.w.getConfig().setMaxBufferedDocs(TestUtil.nextInt(random(), 5, 30));
w.commit();

View File

@ -24,7 +24,7 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestNewestSegment extends LuceneTestCase {
public void testNewestSegment() throws Exception {
Directory directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())));
assertNull(writer.newestSegment());
writer.shutdown();
directory.close();

View File

@ -70,9 +70,8 @@ public class TestNoDeletionPolicy extends LuceneTestCase {
@Test
public void testAllCommitsRemain() throws Exception {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(newTextField("c", "a" + i, Field.Store.YES));

View File

@ -77,7 +77,7 @@ public class TestNorms extends LuceneTestCase {
Directory dir = newDirectory();
MockAnalyzer analyzer = new MockAnalyzer(random());
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
IndexWriterConfig config = newIndexWriterConfig(analyzer);
config.setSimilarity(new CustomNormEncodingSimilarity());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
Document doc = new Document();
@ -129,8 +129,7 @@ public class TestNorms extends LuceneTestCase {
Random random = random();
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
analyzer);
IndexWriterConfig config = newIndexWriterConfig(analyzer);
Similarity provider = new MySimProvider();
config.setSimilarity(provider);
RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);

View File

@ -69,8 +69,8 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdatesAreFlushed() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setRAMBufferSizeMB(0.00000001));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))
.setRAMBufferSizeMB(0.00000001));
writer.addDocument(doc(0)); // val=1
writer.addDocument(doc(1)); // val=2
writer.addDocument(doc(3)); // val=2
@ -92,7 +92,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testSimple() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// make sure random config doesn't flush on us
conf.setMaxBufferedDocs(10);
conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
@ -126,7 +126,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateFewSegments() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(2); // generate few segments
conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
IndexWriter writer = new IndexWriter(dir, conf);
@ -174,7 +174,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testReopen() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(doc(0));
writer.addDocument(doc(1));
@ -211,7 +211,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// create an index with a segment with only deletes, a segment with both
// deletes and updates and a segment with only updates
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test
IndexWriter writer = new IndexWriter(dir, conf);
@ -261,7 +261,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testUpdatesWithDeletes() throws Exception {
// update and delete different documents in the same commit session
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
IndexWriter writer = new IndexWriter(dir, conf);
@ -296,7 +296,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testUpdateAndDeleteSameDocument() throws Exception {
// update and delete same document in same commit session
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // control segment flushing
IndexWriter writer = new IndexWriter(dir, conf);
@ -330,7 +330,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testMultipleDocValuesTypes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // prevent merges
IndexWriter writer = new IndexWriter(dir, conf);
@ -381,7 +381,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testMultipleNumericDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(10); // prevent merges
IndexWriter writer = new IndexWriter(dir, conf);
@ -414,7 +414,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testDocumentWithNoValue() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
for (int i = 0; i < 2; i++) {
@ -447,7 +447,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// we don't support adding new fields or updating existing non-numeric-dv
// fields through numeric updates
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -478,7 +478,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testDifferentDVFormatPerField() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(new Lucene49Codec() {
@Override
public DocValuesFormat getDocValuesFormatForField(String field) {
@ -516,7 +516,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateSameDocMultipleTimes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -544,7 +544,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testSegmentMerges() throws Exception {
Directory dir = newDirectory();
Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
IndexWriter writer = new IndexWriter(dir, conf);
int docid = 0;
@ -572,7 +572,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.commit();
} else if (random.nextDouble() < 0.1) {
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
conf = newIndexWriterConfig(new MockAnalyzer(random));
writer = new IndexWriter(dir, conf);
}
@ -616,7 +616,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testUpdateDocumentByMultipleTerms() throws Exception {
// make sure the order of updates is respected, even when multiple terms affect same document
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -645,7 +645,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testManyReopensAndFields() throws Exception {
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
LogMergePolicy lmp = newLogMergePolicy();
lmp.setMergeFactor(3); // merge often
conf.setMergePolicy(lmp);
@ -735,7 +735,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateSegmentWithNoDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// legit.
@ -789,7 +789,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateSegmentWithPostingButNoDocValues() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// legit.
@ -833,7 +833,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// this used to fail because FieldInfos.Builder neglected to update
// globalFieldMaps.docValueTypes map
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -859,7 +859,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
boolean oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
// create a segment with an old Codec
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]);
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
IndexWriter writer = new IndexWriter(dir, conf);
@ -869,7 +869,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
writer.shutdown();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.updateNumericDocValue(new Term("id", "doc"), "f", 4L);
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
@ -888,7 +888,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testStressMultiThreading() throws Exception {
final Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
final IndexWriter writer = new IndexWriter(dir, conf);
// create index
@ -1018,7 +1018,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
public void testUpdateDifferentDocsInDifferentGens() throws Exception {
// update same document multiple times across generations
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMaxBufferedDocs(4);
IndexWriter writer = new IndexWriter(dir, conf);
final int numDocs = atLeast(10);
@ -1055,7 +1055,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testChangeCodec() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
conf.setCodec(new Lucene49Codec() {
@Override
@ -1072,7 +1072,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.shutdown();
// change format
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions.
conf.setCodec(new Lucene49Codec() {
@Override
@ -1104,7 +1104,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testAddIndexes() throws Exception {
Directory dir1 = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir1, conf);
final int numDocs = atLeast(50);
@ -1134,7 +1134,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.shutdown();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir2, conf);
if (random().nextBoolean()) {
writer.addIndexes(dir1);
@ -1162,7 +1162,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testDeleteUnusedUpdatesFiles() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1194,7 +1194,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// LUCENE-5248: make sure that when there are many updates, we don't use too much RAM
Directory dir = newDirectory();
final Random random = random();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc
IndexWriter writer = new IndexWriter(dir, conf);
@ -1260,7 +1260,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdatesOrder() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1287,7 +1287,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateAllDeletedSegment() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1312,7 +1312,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
@Test
public void testUpdateTwoNonexistingTerms() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1337,7 +1337,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
// LUCENE-5591: make sure we pass an IOContext with an approximate
// segmentSize in FlushInfo
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// we want a single large enough segment so that a doc-values update writes a large file
conf.setMergePolicy(NoMergePolicy.INSTANCE);
conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush
@ -1350,7 +1350,7 @@ public class TestNumericDocValuesUpdates extends LuceneTestCase {
writer.close();
NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, 100, 1/(1024.*1024.));
conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
conf = newIndexWriterConfig(new MockAnalyzer(random()));
// we want a single large enough segment so that a doc-values update writes a large file
conf.setMergePolicy(NoMergePolicy.INSTANCE);
conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush


@ -35,7 +35,7 @@ public class TestOmitNorms extends LuceneTestCase {
public void testOmitNorms() throws Exception {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer));
Document d = new Document();
// this field will have norms
@ -82,9 +82,9 @@ public class TestOmitNorms extends LuceneTestCase {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(
ram,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(3).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(3)
.setMergePolicy(newLogMergePolicy(2))
);
Document d = new Document();
@ -137,9 +137,9 @@ public class TestOmitNorms extends LuceneTestCase {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(
ram,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(10)
.setMergePolicy(newLogMergePolicy(2))
);
Document d = new Document();
@ -189,8 +189,9 @@ public class TestOmitNorms extends LuceneTestCase {
public void testNoNrmFile() throws Throwable {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(3)
.setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setNoCFSRatio(0.0);
@ -267,7 +268,8 @@ public class TestOmitNorms extends LuceneTestCase {
*/
NumericDocValues getNorms(String field, Field f1, Field f2) throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy());
RandomIndexWriter riw = new RandomIndexWriter(random(), dir, iwc);
// add f1


@ -67,7 +67,7 @@ public class TestOmitPositions extends LuceneTestCase {
public void testPositions() throws Exception {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer));
Document d = new Document();
// f1,f2,f3: docs only
@ -190,8 +190,9 @@ public class TestOmitPositions extends LuceneTestCase {
public void testNoPrxFile() throws Throwable {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(3)
.setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setNoCFSRatio(0.0);


@ -75,7 +75,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer));
Document d = new Document();
// this field will have Tf
@ -122,7 +122,7 @@ public class TestOmitTf extends LuceneTestCase {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(
ram,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
newIndexWriterConfig(analyzer).
setMaxBufferedDocs(3).
setMergePolicy(newLogMergePolicy(2))
);
@ -175,7 +175,7 @@ public class TestOmitTf extends LuceneTestCase {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(
ram,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
newIndexWriterConfig(analyzer).
setMaxBufferedDocs(10).
setMergePolicy(newLogMergePolicy(2))
);
@ -222,8 +222,9 @@ public class TestOmitTf extends LuceneTestCase {
public void testNoPrxFile() throws Throwable {
Directory ram = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(3)
.setMergePolicy(newLogMergePolicy()));
LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy();
lmp.setMergeFactor(2);
lmp.setNoCFSRatio(0.0);
@ -263,10 +264,10 @@ public class TestOmitTf extends LuceneTestCase {
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setMaxBufferedDocs(2).
setSimilarity(new SimpleSimilarity()).
setMergePolicy(newLogMergePolicy(2))
newIndexWriterConfig(analyzer)
.setMaxBufferedDocs(2)
.setSimilarity(new SimpleSimilarity())
.setMergePolicy(newLogMergePolicy(2))
);
StringBuilder sb = new StringBuilder(265);
@ -444,7 +445,7 @@ public class TestOmitTf extends LuceneTestCase {
public void testStats() throws Exception {
Directory dir = newDirectory();
RandomIndexWriter iw = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
ft.setIndexOptions(IndexOptions.DOCS_ONLY);


@ -144,7 +144,7 @@ public class TestParallelAtomicReader extends LuceneTestCase {
// one document only:
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
Document d3 = new Document();
d3.add(newTextField("f3", "v1", Field.Store.YES));
@ -261,7 +261,7 @@ public class TestParallelAtomicReader extends LuceneTestCase {
// Fields 1-4 indexed together:
private IndexSearcher single(Random random) throws IOException {
dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)));
Document d1 = new Document();
d1.add(newTextField("f1", "v1", Field.Store.YES));
d1.add(newTextField("f2", "v1", Field.Store.YES));
@ -293,7 +293,7 @@ public class TestParallelAtomicReader extends LuceneTestCase {
private Directory getDir1(Random random) throws IOException {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random)));
Document d1 = new Document();
d1.add(newTextField("f1", "v1", Field.Store.YES));
d1.add(newTextField("f2", "v1", Field.Store.YES));
@ -308,7 +308,7 @@ public class TestParallelAtomicReader extends LuceneTestCase {
private Directory getDir2(Random random) throws IOException {
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random)));
Document d3 = new Document();
d3.add(newTextField("f3", "v1", Field.Store.YES));
d3.add(newTextField("f4", "v1", Field.Store.YES));


@ -220,7 +220,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
// one document only:
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
Document d3 = new Document();
d3.add(newTextField("f3", "v1", Field.Store.YES));
@ -419,7 +419,7 @@ public class TestParallelCompositeReader extends LuceneTestCase {
// Fields 1-4 indexed together:
private IndexSearcher single(Random random, boolean compositeComposite) throws IOException {
dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)));
Document d1 = new Document();
d1.add(newTextField("f1", "v1", Field.Store.YES));
d1.add(newTextField("f2", "v1", Field.Store.YES));
@ -478,8 +478,8 @@ public class TestParallelCompositeReader extends LuceneTestCase {
// subreader structure: (1,2,1)
private Directory getDir1(Random random) throws IOException {
Directory dir1 = newDirectory();
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter w1 = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d1 = new Document();
d1.add(newTextField("f1", "v1", Field.Store.YES));
d1.add(newTextField("f2", "v1", Field.Store.YES));
@ -505,8 +505,8 @@ public class TestParallelCompositeReader extends LuceneTestCase {
// subreader structure: (1,2,1)
private Directory getDir2(Random random) throws IOException {
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d1 = new Document();
d1.add(newTextField("f3", "v1", Field.Store.YES));
d1.add(newTextField("f4", "v1", Field.Store.YES));
@ -532,8 +532,8 @@ public class TestParallelCompositeReader extends LuceneTestCase {
// this dir has a different subreader structure (1,1,2);
private Directory getInvalidStructuredDir2(Random random) throws IOException {
Directory dir2 = newDirectory();
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.INSTANCE));
IndexWriter w2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random))
.setMergePolicy(NoMergePolicy.INSTANCE));
Document d1 = new Document();
d1.add(newTextField("f3", "v1", Field.Store.YES));
d1.add(newTextField("f4", "v1", Field.Store.YES));


@ -39,14 +39,14 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
*/
public void testEmptyIndex() throws IOException {
Directory rd1 = newDirectory();
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(new MockAnalyzer(random())));
iw.shutdown();
// create a copy:
Directory rd2 = newDirectory(rd1);
Directory rdOut = newDirectory();
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random())));
ParallelAtomicReader apr = new ParallelAtomicReader(
SlowCompositeReaderWrapper.wrap(DirectoryReader.open(rd1)),
@ -89,7 +89,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
if (VERBOSE) {
System.out.println("\nTEST: make 1st writer");
}
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(rd1, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
Field idField = newTextField("id", "", Field.Store.NO);
doc.add(idField);
@ -117,14 +117,15 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
assertEquals(1, ir.numDocs());
ir.close();
iw = new IndexWriter(rd1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw = new IndexWriter(rd1, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
iw.forceMerge(1);
iw.shutdown();
}
Directory rd2 = newDirectory();
{
IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(rd2, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
iw.addDocument(doc);
iw.shutdown();
@ -132,7 +133,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
Directory rdOut = newDirectory();
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig(new MockAnalyzer(random())));
final DirectoryReader reader1, reader2;
ParallelAtomicReader pr = new ParallelAtomicReader(
SlowCompositeReaderWrapper.wrap(reader1 = DirectoryReader.open(rd1)),


@ -41,8 +41,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
super.setUp();
Document doc;
rd1 = newDirectory();
IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw1 = new IndexWriter(rd1, newIndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(newTextField("field1", "the quick brown fox jumps", Field.Store.YES));
@ -51,8 +50,7 @@ public class TestParallelTermEnum extends LuceneTestCase {
iw1.shutdown();
rd2 = newDirectory();
IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw2 = new IndexWriter(rd2, newIndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(newTextField("field1", "the fox jumps over the lazy dog", Field.Store.YES));


@ -60,7 +60,7 @@ public class TestPayloads extends LuceneTestCase {
public void testPayloadFieldBit() throws Exception {
Directory ram = newDirectory();
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(ram, newIndexWriterConfig(analyzer));
Document d = new Document();
// this field won't have any payloads
d.add(newTextField("f1", "This field has no payloads", Field.Store.NO));
@ -88,8 +88,8 @@ public class TestPayloads extends LuceneTestCase {
// now we add another document which has payloads for field f3 and verify if the SegmentMerger
// enabled payloads for that field
analyzer = new PayloadAnalyzer(); // Clear payload state for each field
writer = new IndexWriter(ram, newIndexWriterConfig( TEST_VERSION_CURRENT,
analyzer).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(ram, newIndexWriterConfig(analyzer)
.setOpenMode(OpenMode.CREATE));
d = new Document();
d.add(newTextField("f1", "This field has no payloads", Field.Store.NO));
d.add(newTextField("f2", "This field has payloads in all docs", Field.Store.NO));
@ -125,8 +125,7 @@ public class TestPayloads extends LuceneTestCase {
// different tests to verify the payload encoding
private void performTest(Directory dir) throws Exception {
PayloadAnalyzer analyzer = new PayloadAnalyzer();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, analyzer)
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setOpenMode(OpenMode.CREATE)
.setMergePolicy(newLogMergePolicy()));
@ -264,8 +263,8 @@ public class TestPayloads extends LuceneTestCase {
// test long payload
analyzer = new PayloadAnalyzer();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
analyzer).setOpenMode(OpenMode.CREATE));
writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)
.setOpenMode(OpenMode.CREATE));
String singleTerm = "lucene";
d = new Document();
@ -452,8 +451,7 @@ public class TestPayloads extends LuceneTestCase {
final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
Directory dir = newDirectory();
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
final String field = "test";
Thread[] ingesters = new Thread[numThreads];
@ -584,7 +582,7 @@ public class TestPayloads extends LuceneTestCase {
/** some docs have payload att, some not */
public void testMixupDocs() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig iwc = newIndexWriterConfig(null);
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();


@ -39,7 +39,7 @@ public class TestPayloadsOnVectors extends LuceneTestCase {
/** some docs have payload att, some not */
public void testMixupDocs() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();


@ -56,7 +56,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = newIndexWriterConfig(new MockAnalyzer(random()));
}
public void testBasic() throws Exception {
@ -129,7 +129,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
public void doTestNumbers(boolean withPayloads) throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = withPayloads ? new MockPayloadAnalyzer() : new MockAnalyzer(random());
iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
iwc = newIndexWriterConfig(analyzer);
iwc.setMergePolicy(newLogMergePolicy()); // will rely on docids a bit for skipping
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
@ -485,7 +485,7 @@ public class TestPostingsOffsets extends LuceneTestCase {
public void testLegalbutVeryLargeOffsets() throws Exception {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null));
Document doc = new Document();
Token t1 = new Token("foo", 0, Integer.MAX_VALUE-500);
if (random().nextBoolean()) {


@ -37,8 +37,8 @@ public class TestReaderClosed extends LuceneTestCase {
super.setUp();
dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)));
newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)));
Document doc = new Document();
Field field = newStringField("field", "", Field.Store.NO);


@ -37,7 +37,9 @@ public class TestRollback extends LuceneTestCase {
rw.shutdown();
// If buffer size is small enough to cause a flush, errors ensue...
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setOpenMode(IndexWriterConfig.OpenMode.APPEND));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setOpenMode(IndexWriterConfig.OpenMode.APPEND));
for (int i = 0; i < 3; i++) {
Document doc = new Document();


@ -49,7 +49,7 @@ public class TestRollingUpdates extends LuceneTestCase {
MockAnalyzer analyzer = new MockAnalyzer(random());
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
final int SIZE = atLeast(20);
int id = 0;
IndexReader r = null;
@ -160,8 +160,8 @@ public class TestRollingUpdates extends LuceneTestCase {
final LineFileDocs docs = new LineFileDocs(random());
for (int r = 0; r < 3; r++) {
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2));
final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
final int numUpdates = atLeast(20);
int numThreads = TestUtil.nextInt(random(), 2, 6);
IndexingThread[] threads = new IndexingThread[numThreads];


@ -100,7 +100,8 @@ public class TestSegmentTermDocs extends LuceneTestCase {
public void testSkipTo() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(newLogMergePolicy()));
Term ta = new Term("content","aaa");
for(int i = 0; i < 10; i++)


@ -49,7 +49,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
public void testTermEnum() throws IOException {
IndexWriter writer = null;
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
// ADD 100 documents with term : aaa
// add 100 documents with terms: aaa bbb
@ -65,7 +65,8 @@ public class TestSegmentTermEnum extends LuceneTestCase {
verifyDocFreq();
// merge segments
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
@ -75,7 +76,8 @@ public class TestSegmentTermEnum extends LuceneTestCase {
public void testPrevTermAtEnd() throws IOException
{
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())));
addDoc(writer, "aaa bbb");
writer.shutdown();
SegmentReader reader = getOnlySegmentReader(DirectoryReader.open(dir));
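The setCodec line above pins one postings format for every field, so the seek-to-previous-term assertion runs against the Lucene41 term dictionary regardless of what the test framework's codec randomization would otherwise pick. A sketch of that variant, same assumptions as the earlier ones:

    // Pin a single postings format for all fields instead of letting
    // the randomized codec choose per field.
    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()))
        .setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));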


@ -44,7 +44,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
}
private static IndexWriterConfig newWriterConfig() {
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig conf = newIndexWriterConfig(null);
conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
// prevent any merges by default.
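newWriterConfig here passes null for the analyzer: the test only exercises flush and merge sizing, never analysis (the same trick appears in TestPayloads.testMixupDocs and TestPostingsOffsets above). A minimal sketch, assuming the helper tolerates a null Analyzer as these call sites imply:

    // No analysis happens in this test, so skip the analyzer entirely.
    IndexWriterConfig conf = newIndexWriterConfig(null);
    conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // no doc-count trigger
    conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);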


@ -42,7 +42,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
public static final String INDEX_PATH = "test.snapshots";
protected IndexWriterConfig getConfig(Random random, IndexDeletionPolicy dp) {
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random));
if (dp != null) {
conf.setIndexDeletionPolicy(dp);
}
@ -105,8 +105,8 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
final long stopTime = System.currentTimeMillis() + 1000;
SnapshotDeletionPolicy dp = getDeletionPolicy();
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(dp)
final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setIndexDeletionPolicy(dp)
.setMaxBufferedDocs(2));
// Verify we catch misuse:


@ -116,10 +116,10 @@ public class TestStressIndexing extends LuceneTestCase {
stress test.
*/
public void runStressTest(Directory directory, MergeScheduler mergeScheduler) throws Exception {
IndexWriter modifier = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergeScheduler(
mergeScheduler));
IndexWriter modifier = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(10)
.setMergeScheduler(mergeScheduler));
modifier.commit();
TimedThread[] threads = new TimedThread[4];


@ -146,9 +146,11 @@ public class TestStressIndexing2 extends LuceneTestCase {
public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<>();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setRAMBufferSizeMB(0.1)
.setMaxBufferedDocs(maxBufferedDocs)
.setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
w.commit();
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setNoCFSRatio(0.0);
@ -197,10 +199,13 @@ public class TestStressIndexing2 extends LuceneTestCase {
public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates,
boolean doReaderPooling) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<>();
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)
.setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates))
.setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE)
.setRAMBufferSizeMB(0.1)
.setMaxBufferedDocs(maxBufferedDocs)
.setIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates))
.setReaderPooling(doReaderPooling)
.setMergePolicy(newLogMergePolicy()), new YieldTestPoint());
LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy();
lmp.setNoCFSRatio(0.0);
lmp.setMergeFactor(mergeFactor);


@ -106,7 +106,7 @@ public class TestStressNRT extends LuceneTestCase {
Directory dir = newDirectory();
final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.setDoRandomForceMergeAssert(false);
writer.commit();
reader = DirectoryReader.open(dir);


@ -91,9 +91,10 @@ public class TestTermVectorsReader extends LuceneTestCase {
dir = newDirectory();
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()).
newIndexWriterConfig(new MyAnalyzer()).
setMaxBufferedDocs(-1).
setMergePolicy(newLogMergePolicy(false, 10)).setUseCompoundFile(false)
setMergePolicy(newLogMergePolicy(false, 10))
.setUseCompoundFile(false)
);
Document doc = new Document();


@ -43,8 +43,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1442
public void testDoubleOffsetCounting() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -103,7 +102,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1442
public void testDoubleOffsetCounting2() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -138,7 +137,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1448
public void testEndOffsetPositionCharAnalyzer() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -174,7 +173,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
public void testEndOffsetPositionWithCachingTokenFilter() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer));
Document doc = new Document();
try (TokenStream stream = analyzer.tokenStream("field", "abcd ")) {
stream.reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct?
@ -213,8 +212,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1448
public void testEndOffsetPositionStopFilter() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET)));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -249,8 +247,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1448
public void testEndOffsetPositionStandard() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -294,8 +291,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -333,8 +329,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField2() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);
@ -379,12 +374,11 @@ public class TestTermVectorsWriter extends LuceneTestCase {
Directory dir = newDirectory();
for(int iter=0;iter<2;iter++) {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(new LogDocMergePolicy()));
Document document = new Document();
FieldType customType = new FieldType();
@ -415,11 +409,11 @@ public class TestTermVectorsWriter extends LuceneTestCase {
}
reader.close();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setMaxBufferedDocs(2)
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(new LogDocMergePolicy()));
Directory[] indexDirs = {new MockDirectoryWrapper(random(), new RAMDirectory(dir, newIOContext(random())))};
writer.addIndexes(indexDirs);
@ -433,12 +427,11 @@ public class TestTermVectorsWriter extends LuceneTestCase {
public void testTermVectorCorruption2() throws IOException {
Directory dir = newDirectory();
for(int iter=0;iter<2;iter++) {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(new LogDocMergePolicy()));
Document document = new Document();
@ -474,11 +467,11 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1168
public void testTermVectorCorruption3() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMaxBufferedDocs(2).setRAMBufferSizeMB(
IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler(
new SerialMergeScheduler()).setMergePolicy(new LogDocMergePolicy()));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(new LogDocMergePolicy()));
Document document = new Document();
FieldType customType = new FieldType();
@ -496,11 +489,11 @@ public class TestTermVectorsWriter extends LuceneTestCase {
writer.addDocument(document);
writer.shutdown();
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT,
new MockAnalyzer(random())).setMaxBufferedDocs(2)
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2)
.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
.setMergeScheduler(new SerialMergeScheduler())
.setMergePolicy(new LogDocMergePolicy()));
for(int i=0;i<6;i++)
writer.addDocument(document);
@ -519,8 +512,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1008
public void testNoTermVectorAfterTermVector() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document document = new Document();
FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED);
customType2.setStoreTermVectors(true);
@ -550,8 +542,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
// LUCENE-1010
public void testNoTermVectorAfterTermVectorMerge() throws IOException {
Directory dir = newDirectory();
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
Document document = new Document();
FieldType customType = new FieldType(StringField.TYPE_NOT_STORED);
customType.setStoreTermVectors(true);


@ -89,10 +89,10 @@ public class TestTermdocPerf extends LuceneTestCase {
doc.add(newStringField(field, val, Field.Store.NO));
IndexWriter writer = new IndexWriter(
dir,
newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(100).
setMergePolicy(newLogMergePolicy(100))
newIndexWriterConfig(analyzer)
.setOpenMode(OpenMode.CREATE)
.setMaxBufferedDocs(100)
.setMergePolicy(newLogMergePolicy(100))
);
for (int i=0; i<ndocs; i++) {


@ -351,7 +351,7 @@ public class TestTermsEnum extends LuceneTestCase {
private IndexReader makeIndex(String... terms) throws Exception {
d = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
/*
iwc.setCodec(new StandardCodec(minTermsInBlock, maxTermsInBlock));
@ -718,7 +718,7 @@ public class TestTermsEnum extends LuceneTestCase {
public void testIntersectBasic() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(new LogDocMergePolicy());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();
@ -768,7 +768,7 @@ public class TestTermsEnum extends LuceneTestCase {
}
public void testIntersectStartTerm() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(new LogDocMergePolicy());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();
@ -825,7 +825,7 @@ public class TestTermsEnum extends LuceneTestCase {
public void testIntersectEmptyString() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(new LogDocMergePolicy());
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();
@ -977,7 +977,7 @@ public class TestTermsEnum extends LuceneTestCase {
}
sb.append(' ');
sb.append(termsList.get(termCount).utf8ToString());
IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
Document doc = new Document();


@ -53,8 +53,7 @@ public class TestTermsEnum2 extends LuceneTestCase {
numIterations = atLeast(50);
dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir,
newIndexWriterConfig(TEST_VERSION_CURRENT,
new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false))
.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)));
Document doc = new Document();
Field field = newStringField("field", "", Field.Store.YES);


@ -58,7 +58,7 @@ public class TestThreadedForceMerge extends LuceneTestCase {
IndexWriter writer = new IndexWriter(
directory,
newIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).
newIndexWriterConfig(ANALYZER).
setOpenMode(OpenMode.CREATE).
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy())
@ -125,9 +125,9 @@ public class TestThreadedForceMerge extends LuceneTestCase {
assertEquals("index=" + writer.segString() + " numDocs=" + writer.numDocs() + " maxDoc=" + writer.maxDoc() + " config=" + writer.getConfig(), expectedDocCount, writer.maxDoc());
writer.shutdown();
writer = new IndexWriter(directory, newIndexWriterConfig(
TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
writer = new IndexWriter(directory, newIndexWriterConfig(ANALYZER)
.setOpenMode(OpenMode.APPEND)
.setMaxBufferedDocs(2));
DirectoryReader reader = DirectoryReader.open(directory);
assertEquals("reader=" + reader, 1, reader.leaves().size());


@@ -32,7 +32,7 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
public void testForceMergeDeletes() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
TieredMergePolicy tmp = newTieredMergePolicy();
conf.setMergePolicy(tmp);
conf.setMaxBufferedDocs(4);
@@ -75,7 +75,7 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
System.out.println("TEST: iter=" + iter);
}
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setMergeScheduler(new SerialMergeScheduler());
TieredMergePolicy tmp = newTieredMergePolicy();
conf.setMergePolicy(tmp);
@@ -112,7 +112,7 @@ public class TestTieredMergePolicy extends BaseMergePolicyTestCase {
public void testForceMergeDeletesMaxSegSize() throws Exception {
final Directory dir = newDirectory();
final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
final IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
final TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setMaxMergedSegmentMB(0.01);
tmp.setForceMergeDeletesPctAllowed(0.0);


@@ -66,9 +66,9 @@ public class TestTransactionRollback extends LuceneTestCase {
throw new RuntimeException("Couldn't find commit point "+id);
}
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(
new RollbackDeletionPolicy(id)).setIndexCommit(last));
IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new RollbackDeletionPolicy(id))
.setIndexCommit(last));
Map<String,String> data = new HashMap<>();
data.put("index", "Rolled back to 1-"+id);
w.setCommitData(data);
@@ -131,7 +131,8 @@ public class TestTransactionRollback extends LuceneTestCase {
//Build index, of records 1 to 100, committing after each batch of 10
IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
IndexWriter w=new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(sdp));
IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(sdp));
for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
Document doc=new Document();
@@ -212,7 +213,7 @@ public class TestTransactionRollback extends LuceneTestCase {
for(int i=0;i<2;i++) {
// Unless you specify a prior commit point, rollback
// should not work:
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(new DeleteLastCommitPolicy())).shutdown();
IndexReader r = DirectoryReader.open(dir);
assertEquals(100, r.numDocs());


@@ -100,7 +100,7 @@ public class TestTransactions extends LuceneTestCase {
IndexWriter writer1 = new IndexWriter(
dir1,
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setMaxBufferedDocs(3).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(2))
@@ -111,7 +111,7 @@ public class TestTransactions extends LuceneTestCase {
// happen @ different times
IndexWriter writer2 = new IndexWriter(
dir2,
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).
newIndexWriterConfig(new MockAnalyzer(random())).
setMaxBufferedDocs(2).
setMergeScheduler(new ConcurrentMergeScheduler()).
setMergePolicy(newLogMergePolicy(3))
@@ -212,7 +212,7 @@ public class TestTransactions extends LuceneTestCase {
}
public void initIndex(Directory dir) throws Throwable {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
for(int j=0; j<7; j++) {
Document d = new Document();
int n = random().nextInt();

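For reference, a minimal sketch (not part of the diff) of the call-pattern change applied in every hunk above: the two-argument newIndexWriterConfig(Version, Analyzer) test helper is replaced by the single-argument newIndexWriterConfig(Analyzer). The test method below is hypothetical and assumes it runs inside a LuceneTestCase subclass so that newDirectory(), newIndexWriterConfig() and random() are in scope, with the usual imports for Document, IndexWriterConfig, LogDocMergePolicy, Directory and the test framework's MockAnalyzer and RandomIndexWriter; the analyzer and merge policy choices are illustrative.

    // Hypothetical smoke test showing the new analyzer-only helper call.
    public void testAnalyzerOnlyConfigSketch() throws Exception {
      Directory dir = newDirectory();
      // Old form, removed throughout this commit:
      //   newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
      // New form: only the analyzer is passed; the version is defaulted.
      IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
      iwc.setMergePolicy(new LogDocMergePolicy());
      RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc);
      w.addDocument(new Document()); // index one empty document to exercise the config
      w.close();
      dir.close();
    }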