diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java index 1ffe02fa84e..2daf25394df 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestEmptyTokenStream.java @@ -52,8 +52,7 @@ public class TestEmptyTokenStream extends BaseTokenStreamTestCase { public void testIndexWriter_LUCENE4656() throws IOException { Directory directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, null)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(null)); TokenStream ts = new EmptyTokenStream(); assertFalse(ts.hasAttribute(TermToBytesRefAttribute.class)); diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java index 873734d4f10..addf2d8b20c 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/sinks/TestTeeSinkTokenFilter.java @@ -90,7 +90,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase { public void testEndOffsetPositionWithTeeSinkTokenFilter() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer)); Document doc = new Document(); TokenStream tokenStream = analyzer.tokenStream("field", "abcd "); TeeSinkTokenFilter tee = new TeeSinkTokenFilter(tokenStream); diff --git a/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/Test10KPulsings.java b/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/Test10KPulsings.java index c338491cf91..3c2e1a9c205 100644 --- a/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/Test10KPulsings.java +++ b/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/Test10KPulsings.java @@ -56,7 +56,7 @@ public class Test10KPulsings extends LuceneTestCase { BaseDirectoryWrapper dir = newFSDirectory(f); dir.setCheckIndexOnClose(false); // we do this ourselves explicitly RandomIndexWriter iw = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); Document document = new Document(); FieldType ft = new FieldType(TextField.TYPE_STORED); @@ -107,7 +107,7 @@ public class Test10KPulsings extends LuceneTestCase { BaseDirectoryWrapper dir = newFSDirectory(f); dir.setCheckIndexOnClose(false); // we do this ourselves explicitly RandomIndexWriter iw = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); Document document = new Document(); FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java b/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java index 53f5cf8da27..1df62519256 100644 --- 
a/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java +++ b/lucene/codecs/src/test/org/apache/lucene/codecs/pulsing/TestPulsingReuse.java @@ -47,7 +47,7 @@ public class TestPulsingReuse extends LuceneTestCase { Codec cp = TestUtil.alwaysPostingsFormat(new Pulsing41PostingsFormat(1)); Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); Document doc = new Document(); doc.add(new TextField("foo", "a b b c c c d e f g g h i i j j k", Field.Store.NO)); iw.addDocument(doc); @@ -85,7 +85,7 @@ public class TestPulsingReuse extends LuceneTestCase { Codec cp = TestUtil.alwaysPostingsFormat(new NestedPulsingPostingsFormat()); BaseDirectoryWrapper dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); Document doc = new Document(); doc.add(new TextField("foo", "a b b c c c d e f g g g h i i j j k l l m m m", Field.Store.NO)); // note: the reuse is imperfect, here we would have 4 enums (lost reuse when we get an enum for 'm') diff --git a/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java b/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java index 125d94f3903..1e262601478 100644 --- a/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/TestExternalCodecs.java @@ -69,7 +69,7 @@ public class TestExternalCodecs extends LuceneTestCase { dir.setCheckIndexOnClose(false); // we use a custom codec provider IndexWriter w = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setCodec(new CustomPerFieldCodec()). 
setMergePolicy(newLogMergePolicy(3)) ); diff --git a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java index 339f8718879..7ad2346b138 100644 --- a/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java +++ b/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java @@ -93,8 +93,8 @@ public class TestMergeSchedulerExternal extends LuceneTestCase { Field idField = newStringField("id", "", Field.Store.YES); doc.add(idField); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new MyMergeScheduler()) + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergeScheduler(new MyMergeScheduler()) .setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setMergePolicy(newLogMergePolicy())); LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy(); diff --git a/lucene/core/src/test/org/apache/lucene/TestSearch.java b/lucene/core/src/test/org/apache/lucene/TestSearch.java index a72d28df9f9..5a18c65fdbc 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearch.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearch.java @@ -42,7 +42,7 @@ public class TestSearch extends LuceneTestCase { Directory directory = newDirectory(); try { Analyzer analyzer = new MockAnalyzer(random()); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); IndexWriter writer = new IndexWriter(directory, conf); try { @@ -110,7 +110,7 @@ public class TestSearch extends LuceneTestCase { throws Exception { Directory directory = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); MergePolicy mp = conf.getMergePolicy(); mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); IndexWriter writer = new IndexWriter(directory, conf); diff --git a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java index 56260c13434..c7ea5097ad6 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java @@ -70,7 +70,7 @@ public class TestSearchForDuplicates extends LuceneTestCase { private void doTest(Random random, PrintWriter out, boolean useCompoundFiles, int MAX_DOCS) throws Exception { Directory directory = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); final MergePolicy mp = conf.getMergePolicy(); mp.setNoCFSRatio(useCompoundFiles ? 
1.0 : 0.0); IndexWriter writer = new IndexWriter(directory, conf); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java index 1e8f62f57af..d7bf6b2ca30 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/compressing/TestCompressingStoredFieldsFormat.java @@ -46,7 +46,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes @Test(expected=IllegalArgumentException.class) public void testDeletePartiallyWrittenFilesIfAbort() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random())); iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30)); iwConf.setCodec(CompressingCodec.randomInstance(random())); // disable CFS because this test checks file names diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestLucene40PostingsReader.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestLucene40PostingsReader.java index 4cc0ef72383..096b9665759 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestLucene40PostingsReader.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestLucene40PostingsReader.java @@ -54,7 +54,7 @@ public class TestLucene40PostingsReader extends LuceneTestCase { */ public void testPostings() throws Exception { Directory dir = newFSDirectory(createTempDir("postings")); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(Codec.forName("Lucene40")); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java index 5268eb062f4..968d8cbfe02 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene40/TestReuseDocsEnum.java @@ -52,7 +52,7 @@ public class TestReuseDocsEnum extends LuceneTestCase { Directory dir = newDirectory(); Codec cp = TestUtil.alwaysPostingsFormat(new Lucene40RWPostingsFormat()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); int numdocs = atLeast(20); createRandomIndex(numdocs, writer, random()); writer.commit(); @@ -80,7 +80,7 @@ public class TestReuseDocsEnum extends LuceneTestCase { Directory dir = newDirectory(); Codec cp = TestUtil.alwaysPostingsFormat(new Lucene40RWPostingsFormat()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp)); + newIndexWriterConfig(new MockAnalyzer(random())).setCodec(cp)); int numdocs = atLeast(20); createRandomIndex(numdocs, writer, random()); writer.commit(); @@ -128,7 +128,7 @@ public class TestReuseDocsEnum extends LuceneTestCase { analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH)); 
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setCodec(cp)); + newIndexWriterConfig(analyzer).setCodec(cp)); int numdocs = atLeast(20); createRandomIndex(numdocs, writer, random()); writer.commit(); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat2.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat2.java index 1f2cebb1cf7..fafd5779790 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat2.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat2.java @@ -44,7 +44,7 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase { public void setUp() throws Exception { super.setUp(); dir = newFSDirectory(createTempDir("testDFBlockSize")); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())); iw = new RandomIndexWriter(random(), dir, iwc); iw.setDoRandomForceMerge(false); // we will ourselves @@ -54,7 +54,7 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase { public void tearDown() throws Exception { iw.shutdown(); TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())); iwc.setOpenMode(OpenMode.APPEND); IndexWriter iw = new IndexWriter(dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat3.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat3.java index 26c2b06f860..59ee086693b 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat3.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene41/TestBlockPostingsFormat3.java @@ -82,7 +82,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase { } } }; - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())); // TODO we could actually add more fields implemented with different PFs // or, just put this test into the usual rotation? 
@@ -137,7 +137,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase { iw.shutdown(); verify(dir); TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge - iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + iwc = newIndexWriterConfig(analyzer); iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat())); iwc.setOpenMode(OpenMode.APPEND); IndexWriter iw2 = new IndexWriter(dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java index 5453c486922..6bb347f607b 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java @@ -79,7 +79,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase { Directory directory = newDirectory(); // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1 - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); final DocValuesFormat fast = DocValuesFormat.forName("Lucene49"); final DocValuesFormat slow = DocValuesFormat.forName("SimpleText"); iwc.setCodec(new Lucene49Codec() { diff --git a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java index 76605166602..259e7fb3347 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java @@ -97,8 +97,8 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase { @Test public void testMergeUnusedPerFieldCodec() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec()); + IndexWriterConfig iwconf = newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE).setCodec(new MockCodec()); IndexWriter writer = newWriter(dir, iwconf); addDocs(writer, 10); writer.commit(); @@ -124,8 +124,8 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase { if (VERBOSE) { System.out.println("TEST: make new index"); } - IndexWriterConfig iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(new MockCodec()); + IndexWriterConfig iwconf = newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE).setCodec(new MockCodec()); iwconf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10); IndexWriter writer = newWriter(dir, iwconf); @@ -144,7 +144,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase { assertQuery(new Term("content", "aaa"), dir, 10); Codec codec = iwconf.getCodec(); - iwconf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + iwconf = newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND).setCodec(codec); //((LogMergePolicy) iwconf.getMergePolicy()).setNoCFSRatio(0.0); //((LogMergePolicy) iwconf.getMergePolicy()).setMergeFactor(10); @@ -301,7 +301,7 @@ public class TestPerFieldPostingsFormat2 
extends LuceneTestCase { private void doTestMixedPostings(Codec codec) throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(codec); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java index 264f3b3affd..cb2c75b3aec 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java @@ -59,8 +59,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())) + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE)); // add 100 documents addDocs(writer, 100); @@ -70,7 +69,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setMergePolicy(newLogMergePolicy(false)) ); @@ -79,14 +78,14 @@ public class TestAddIndexes extends LuceneTestCase { assertEquals(40, writer.maxDoc()); writer.shutdown(); - writer = newWriter(aux2, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); + writer = newWriter(aux2, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); // add 50 documents in compound files addDocs2(writer, 50); assertEquals(50, writer.maxDoc()); writer.shutdown(); // test doc count before segments are merged - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); assertEquals(100, writer.maxDoc()); writer.addIndexes(aux, aux2); assertEquals(190, writer.maxDoc()); @@ -101,14 +100,14 @@ public class TestAddIndexes extends LuceneTestCase { // now add another set in. Directory aux3 = newDirectory(); - writer = newWriter(aux3, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = newWriter(aux3, newIndexWriterConfig(new MockAnalyzer(random()))); // add 40 documents addDocs(writer, 40); assertEquals(40, writer.maxDoc()); writer.shutdown(); // test doc count before segments are merged - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); assertEquals(190, writer.maxDoc()); writer.addIndexes(aux3); assertEquals(230, writer.maxDoc()); @@ -122,7 +121,7 @@ public class TestAddIndexes extends LuceneTestCase { verifyTermDocs(dir, new Term("content", "bbb"), 50); // now fully merge it. 
- writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); writer.forceMerge(1); writer.shutdown(); @@ -135,11 +134,11 @@ public class TestAddIndexes extends LuceneTestCase { // now add a single document Directory aux4 = newDirectory(); - writer = newWriter(aux4, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = newWriter(aux4, newIndexWriterConfig(new MockAnalyzer(random()))); addDocs2(writer, 1); writer.shutdown(); - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); assertEquals(230, writer.maxDoc()); writer.addIndexes(aux4); assertEquals(231, writer.maxDoc()); @@ -162,7 +161,7 @@ public class TestAddIndexes extends LuceneTestCase { Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); writer.addIndexes(aux); // Adds 10 docs, then replaces them with another 10 @@ -198,7 +197,7 @@ public class TestAddIndexes extends LuceneTestCase { Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: @@ -236,7 +235,7 @@ public class TestAddIndexes extends LuceneTestCase { Directory aux = newDirectory(); setUpDirs(dir, aux); - IndexWriter writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); // Adds 10 docs, then replaces them with another 10 // docs, so 10 pending deletes: @@ -276,7 +275,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents addDocs(writer, 100); assertEquals(100, writer.maxDoc()); @@ -284,7 +283,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). setMergePolicy(newLogMergePolicy(false)) @@ -294,7 +293,7 @@ public class TestAddIndexes extends LuceneTestCase { writer.shutdown(); writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). 
setMergePolicy(newLogMergePolicy(false)) @@ -302,7 +301,7 @@ public class TestAddIndexes extends LuceneTestCase { addDocs(writer, 100); writer.shutdown(); - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); try { // cannot add self writer.addIndexes(aux, dir); @@ -332,7 +331,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(10). setMergePolicy(newLogMergePolicy(4)) @@ -361,7 +360,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(9). setMergePolicy(newLogMergePolicy(4)) @@ -390,7 +389,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(10). setMergePolicy(newLogMergePolicy(4)) @@ -429,7 +428,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(4). setMergePolicy(newLogMergePolicy(4)) @@ -458,7 +457,7 @@ public class TestAddIndexes extends LuceneTestCase { IndexWriter writer = newWriter( aux2, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(100). setMergePolicy(newLogMergePolicy(10)) @@ -492,7 +491,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMaxBufferedDocs(6). setMergePolicy(newLogMergePolicy(4)) @@ -555,7 +554,7 @@ public class TestAddIndexes extends LuceneTestCase { private void setUpDirs(Directory dir, Directory aux, boolean withID) throws IOException { IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000)); // add 1000 documents in 1 segment if (withID) { addDocsWithID(writer, 1000, 0); @@ -568,7 +567,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setMaxBufferedDocs(1000). setMergePolicy(newLogMergePolicy(false, 10)) @@ -583,7 +582,7 @@ public class TestAddIndexes extends LuceneTestCase { writer.shutdown(); writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). 
setMaxBufferedDocs(1000). setMergePolicy(newLogMergePolicy(false, 10)) @@ -602,7 +601,7 @@ public class TestAddIndexes extends LuceneTestCase { lmp.setNoCFSRatio(0.0); lmp.setMergeFactor(100); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) + new MockAnalyzer(random())) .setMaxBufferedDocs(5).setMergePolicy(lmp)); Document doc = new Document(); @@ -630,8 +629,7 @@ public class TestAddIndexes extends LuceneTestCase { lmp.setMinMergeMB(0.0001); lmp.setNoCFSRatio(0.0); lmp.setMergeFactor(4); - writer = new IndexWriter(dir2, newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())) + writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())) .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp)); writer.addIndexes(dir); writer.shutdown(); @@ -973,7 +971,7 @@ public class TestAddIndexes extends LuceneTestCase { Directory[] dirs = new Directory[2]; for (int i = 0; i < dirs.length; i++) { dirs[i] = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dirs[i], conf); Document doc = new Document(); doc.add(new StringField("id", "myid", Field.Store.NO)); @@ -1021,8 +1019,8 @@ public class TestAddIndexes extends LuceneTestCase { Codec codec = new CustomPerFieldCodec(); IndexWriter writer = null; - writer = newWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setCodec(codec)); + writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE).setCodec(codec)); // add 100 documents addDocsWithID(writer, 100, 0); assertEquals(100, writer.maxDoc()); @@ -1032,7 +1030,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( aux, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setCodec(codec). setMaxBufferedDocs(10). @@ -1046,7 +1044,7 @@ public class TestAddIndexes extends LuceneTestCase { writer = newWriter( aux2, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.CREATE). setCodec(codec) ); @@ -1059,7 +1057,7 @@ public class TestAddIndexes extends LuceneTestCase { // test doc count before segments are merged writer = newWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). 
setCodec(codec) ); @@ -1137,8 +1135,7 @@ public class TestAddIndexes extends LuceneTestCase { // of the unregistered codec: toAdd.setCheckIndexOnClose(false); { - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(new UnRegisteredCodec()); IndexWriter w = new IndexWriter(toAdd, conf); Document doc = new Document(); @@ -1151,8 +1148,7 @@ public class TestAddIndexes extends LuceneTestCase { { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(TestUtil.alwaysPostingsFormat(new Pulsing41PostingsFormat(1 + random().nextInt(20)))); IndexWriter w = new IndexWriter(dir, conf); try { @@ -1268,7 +1264,7 @@ public class TestAddIndexes extends LuceneTestCase { Directory dest = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setWriteLockTimeout(1); RandomIndexWriter w2 = new RandomIndexWriter(random(), dest, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java index 95cd6179993..00a6917b597 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveChecksumFooter.java @@ -38,7 +38,7 @@ import org.apache.lucene.util.TestUtil; public class TestAllFilesHaveChecksumFooter extends LuceneTestCase { public void test() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(new Lucene49Codec()); RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java index bc9e2622720..cb3192b6a78 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesHaveCodecHeader.java @@ -38,7 +38,7 @@ import org.apache.lucene.util.TestUtil; public class TestAllFilesHaveCodecHeader extends LuceneTestCase { public void test() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(new Lucene49Codec()); RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java index 756adaa70f8..4858931353e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java @@ -276,7 +276,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { case 0: return new IndexUpgrader(dir, 
TEST_VERSION_CURRENT); case 1: return new IndexUpgrader(dir, TEST_VERSION_CURRENT, streamType ? null : InfoStream.NO_OUTPUT, false); - case 2: return new IndexUpgrader(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null), false); + case 2: return new IndexUpgrader(dir, newIndexWriterConfig(null), false); default: fail("case statement didn't get updated when random bounds changed"); } return null; // never get here @@ -330,8 +330,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { } try { - writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); fail("IndexWriter creation should not pass for "+unsupportedNames[i]); } catch (IndexFormatTooOldException e) { // pass @@ -386,8 +385,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { System.out.println("\nTEST: old index " + name); } Directory targetDir = newDirectory(); - IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random()))); w.addIndexes(oldIndexDirs.get(name)); if (VERBOSE) { System.out.println("\nTEST: done adding indices; now close"); @@ -403,8 +401,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { IndexReader reader = DirectoryReader.open(oldIndexDirs.get(name)); Directory targetDir = newDirectory(); - IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random()))); w.addIndexes(reader); w.shutdown(); reader.close(); @@ -604,7 +601,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase { public void changeIndexWithAdds(Random random, Directory dir, String origOldName) throws IOException { // open writer - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)) + .setOpenMode(OpenMode.APPEND) + .setMergePolicy(newLogMergePolicy())); // add 10 docs for(int i=0;i<10;i++) { addDoc(writer, 35+i); @@ -630,7 +629,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase { reader.close(); // fully merge - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy())); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)) + .setOpenMode(OpenMode.APPEND) + .setMergePolicy(newLogMergePolicy())); writer.forceMerge(1); writer.shutdown(); @@ -655,7 +656,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase { reader.close(); // fully merge - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)) + .setOpenMode(OpenMode.APPEND)); writer.forceMerge(1); writer.shutdown(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java index 3d809a680e6..2f83c1e4793 100644 --- 
a/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestBinaryDocValuesUpdates.java @@ -93,8 +93,8 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdatesAreFlushed() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setRAMBufferSizeMB(0.00000001)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setRAMBufferSizeMB(0.00000001)); writer.addDocument(doc(0)); // val=1 writer.addDocument(doc(1)); // val=2 writer.addDocument(doc(3)); // val=2 @@ -115,7 +115,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testSimple() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); // make sure random config doesn't flush on us conf.setMaxBufferedDocs(10); conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH); @@ -148,7 +148,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateFewSegments() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(2); // generate few segments conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test IndexWriter writer = new IndexWriter(dir, conf); @@ -195,7 +195,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testReopen() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); writer.addDocument(doc(0)); writer.addDocument(doc(1)); @@ -233,7 +233,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { // create an index with a segment with only deletes, a segment with both // deletes and updates and a segment with only updates Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(10); // control segment flushing conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges for this test IndexWriter writer = new IndexWriter(dir, conf); @@ -282,7 +282,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdatesWithDeletes() throws Exception { // update and delete different documents in the same commit session Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(10); // control segment flushing IndexWriter writer = new IndexWriter(dir, conf); @@ -316,7 +316,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateAndDeleteSameDocument() throws Exception { // update and delete same document in 
same commit session Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(10); // control segment flushing IndexWriter writer = new IndexWriter(dir, conf); @@ -349,7 +349,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testMultipleDocValuesTypes() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(10); // prevent merges IndexWriter writer = new IndexWriter(dir, conf); @@ -398,7 +398,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testMultipleBinaryDocValues() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(10); // prevent merges IndexWriter writer = new IndexWriter(dir, conf); @@ -431,7 +431,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testDocumentWithNoValue() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); for (int i = 0; i < 2; i++) { @@ -463,7 +463,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { // we don't support adding new fields or updating existing non-binary-dv // fields through binary updates Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -493,7 +493,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testDifferentDVFormatPerField() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(new Lucene49Codec() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -530,7 +530,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateSameDocMultipleTimes() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -557,7 +557,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testSegmentMerges() throws Exception { Directory dir = newDirectory(); Random random = random(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random)); IndexWriter writer = new IndexWriter(dir, conf); int docid = 0; @@ -585,7 +585,7 @@ public class TestBinaryDocValuesUpdates extends 
LuceneTestCase { writer.commit(); } else if (random.nextDouble() < 0.1) { writer.shutdown(); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + conf = newIndexWriterConfig(new MockAnalyzer(random)); writer = new IndexWriter(dir, conf); } @@ -628,7 +628,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateDocumentByMultipleTerms() throws Exception { // make sure the order of updates is respected, even when multiple terms affect same document Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -656,7 +656,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testManyReopensAndFields() throws Exception { Directory dir = newDirectory(); final Random random = random(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random)); LogMergePolicy lmp = newLogMergePolicy(); lmp.setMergeFactor(3); // merge often conf.setMergePolicy(lmp); @@ -745,7 +745,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateSegmentWithNoDocValues() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); // prevent merges, otherwise by the time updates are applied // (writer.shutdown()), the segments might have merged and that update becomes // legit. @@ -799,7 +799,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateSegmentWithPostingButNoDocValues() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); // prevent merges, otherwise by the time updates are applied // (writer.shutdown()), the segments might have merged and that update becomes // legit. 
@@ -842,7 +842,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { // this used to fail because FieldInfos.Builder neglected to update // globalFieldMaps.docValueTypes map Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -867,7 +867,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { boolean oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE; // create a segment with an old Codec - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]); OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true; IndexWriter writer = new IndexWriter(dir, conf); @@ -877,7 +877,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { writer.addDocument(doc); writer.shutdown(); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + conf = newIndexWriterConfig(new MockAnalyzer(random())); writer = new IndexWriter(dir, conf); writer.updateBinaryDocValue(new Term("id", "doc"), "f", toBytes(4L)); OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false; @@ -895,7 +895,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testStressMultiThreading() throws Exception { final Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); final IndexWriter writer = new IndexWriter(dir, conf); // create index @@ -1024,7 +1024,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateDifferentDocsInDifferentGens() throws Exception { // update same document multiple times across generations Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(4); IndexWriter writer = new IndexWriter(dir, conf); final int numDocs = atLeast(10); @@ -1060,7 +1060,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testChangeCodec() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions. conf.setCodec(new Lucene49Codec() { @Override @@ -1077,7 +1077,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { writer.shutdown(); // change format - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMergePolicy(NoMergePolicy.INSTANCE); // disable merges to simplify test assertions. 
conf.setCodec(new Lucene49Codec() { @Override @@ -1108,7 +1108,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testAddIndexes() throws Exception { Directory dir1 = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir1, conf); final int numDocs = atLeast(50); @@ -1138,7 +1138,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { writer.shutdown(); Directory dir2 = newDirectory(); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + conf = newIndexWriterConfig(new MockAnalyzer(random())); writer = new IndexWriter(dir2, conf); if (random().nextBoolean()) { writer.addIndexes(dir1); @@ -1165,7 +1165,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testDeleteUnusedUpdatesFiles() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -1196,7 +1196,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { // LUCENE-5248: make sure that when there are many updates, we don't use too much RAM Directory dir = newDirectory(); final Random random = random(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random)); conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB); conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); // don't flush by doc IndexWriter writer = new IndexWriter(dir, conf); @@ -1262,7 +1262,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdatesOrder() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -1288,7 +1288,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateAllDeletedSegment() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -1312,7 +1312,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { public void testUpdateTwoNonexistingTerms() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -1337,7 +1337,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { // LUCENE-5591: make sure we pass an IOContext with an approximate // segmentSize in FlushInfo Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = 
newIndexWriterConfig(new MockAnalyzer(random())); // we want a single large enough segment so that a doc-values update writes a large file conf.setMergePolicy(NoMergePolicy.INSTANCE); conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush @@ -1350,7 +1350,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase { writer.close(); NRTCachingDirectory cachingDir = new NRTCachingDirectory(dir, 100, 1/(1024.*1024.)); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + conf = newIndexWriterConfig(new MockAnalyzer(random())); // we want a single large enough segment so that a doc-values update writes a large file conf.setMergePolicy(NoMergePolicy.INSTANCE); conf.setMaxBufferedDocs(Integer.MAX_VALUE); // manually flush diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java b/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java index fbf1cf50150..6cb526c54ec 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCheckIndex.java @@ -39,7 +39,8 @@ public class TestCheckIndex extends LuceneTestCase { public void testDeletedDocs() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); for(int i=0;i<19;i++) { Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); @@ -101,7 +102,7 @@ public class TestCheckIndex extends LuceneTestCase { // LUCENE-4221: we have to let these thru, for now public void testBogusTermVectors() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); ft.setStoreTermVectors(true); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java index d07c9fd2025..e8466b7439f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java @@ -830,8 +830,7 @@ public class TestCodecs extends LuceneTestCase { // returns 1 in docsEnum.freq() Directory dir = newDirectory(); Random random = random(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))); // we don't need many documents to assert this, but don't use one document either int numDocs = atLeast(random, 50); for (int i = 0; i < numDocs; i++) { @@ -857,7 +856,7 @@ public class TestCodecs extends LuceneTestCase { public void testDisableImpersonation() throws Exception { Codec[] oldCodecs = new Codec[] { new Lucene40RWCodec(), new Lucene41RWCodec(), new Lucene42RWCodec() }; Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(oldCodecs[random().nextInt(oldCodecs.length)]); IndexWriter writer = new IndexWriter(dir, conf); diff --git 
a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java index 21da8f71eba..b0b7cf348f0 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java @@ -83,7 +83,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { FailOnlyOnFlush failure = new FailOnlyOnFlush(); directory.failOn(failure); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); Document doc = new Document(); Field idField = newStringField("id", "", Field.Store.YES); doc.add(idField); @@ -139,9 +140,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { // merging of segments with and without deletes at the // start: mp.setMinMergeDocs(1000); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMergePolicy(mp)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(mp)); Document doc = new Document(); Field idField = newStringField("id", "", Field.Store.YES); @@ -177,9 +177,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { public void testNoExtraFiles() throws IOException { Directory directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); for(int iter=0;iter<7;iter++) { if (VERBOSE) { @@ -196,9 +195,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles"); // Reopen - writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2)); + writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2)); } writer.shutdown(); @@ -214,7 +212,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { IndexWriter writer = new IndexWriter( directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). // Force excessive merging: setMaxBufferedDocs(2). setMergePolicy(newLogMergePolicy(100)) @@ -248,7 +246,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { // Reopen writer = new IndexWriter( directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setOpenMode(OpenMode.APPEND). setMergePolicy(newLogMergePolicy(100)). 
// Force excessive merging: @@ -352,7 +350,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase { if (d instanceof MockDirectoryWrapper) { ((MockDirectoryWrapper)d).setThrottling(MockDirectoryWrapper.Throttling.NEVER); } - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMaxBufferedDocs(5); CountDownLatch atLeastOneMerge = new CountDownLatch(1); iwc.setMergeScheduler(new TrackingCMS(atLeastOneMerge)); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java index c196171b4f8..91c9a4635e3 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestConsistentFieldNumbers.java @@ -37,7 +37,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { public void testSameFieldNumbersAcrossSegments() throws Exception { for (int i = 0; i < 2; i++) { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d1 = new Document(); d1.add(new StringField("f1", "first field", Field.Store.YES)); @@ -46,7 +47,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { if (i == 1) { writer.shutdown(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); } else { writer.commit(); } @@ -76,7 +78,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { assertEquals("f3", fis2.fieldInfo(2).name); assertEquals("f4", fis2.fieldInfo(3).name); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.forceMerge(1); writer.shutdown(); @@ -100,7 +102,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { public void testAddIndexes() throws Exception { Directory dir1 = newDirectory(); Directory dir2 = newDirectory(); - IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d1 = new Document(); d1.add(new TextField("f1", "first field", Field.Store.YES)); @@ -108,7 +111,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { writer.addDocument(d1); writer.shutdown(); - writer = new IndexWriter(dir2, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d2 = new Document(); FieldType customType2 = new FieldType(TextField.TYPE_STORED); @@ -121,7 +125,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { writer.shutdown(); - writer = new IndexWriter(dir1, 
newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); writer.addIndexes(dir2); writer.shutdown(); @@ -149,9 +154,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { for (int i = 0; i < numIters; i++) { Directory dir = newDirectory(); { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy( - NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d = new Document(); d.add(new TextField("f1", "d1 first field", Field.Store.YES)); d.add(new TextField("f2", "d1 second field", Field.Store.YES)); @@ -167,8 +171,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d = new Document(); d.add(new TextField("f1", "d2 first field", Field.Store.YES)); d.add(new StoredField("f3", new byte[] { 1, 2, 3 })); @@ -187,8 +191,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { } { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); Document d = new Document(); d.add(new TextField("f1", "d3 first field", Field.Store.YES)); d.add(new TextField("f2", "d3 second field", Field.Store.YES)); @@ -212,8 +216,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { } { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); writer.deleteDocuments(new Term("f1", "d1")); // nuke the first segment entirely so that the segment with gaps is // loaded first! 
@@ -221,9 +225,9 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { writer.shutdown(); } - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy( - new LogByteSizeMergePolicy()).setInfoStream(new FailOnNonBulkMergesInfoStream())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(new LogByteSizeMergePolicy()) + .setInfoStream(new FailOnNonBulkMergesInfoStream())); writer.forceMerge(1); writer.shutdown(); @@ -251,7 +255,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase { } Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for (int i = 0; i < NUM_DOCS; i++) { Document d = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCrash.java b/lucene/core/src/test/org/apache/lucene/index/TestCrash.java index dc76df03b2b..7001ec686f4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCrash.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCrash.java @@ -37,7 +37,7 @@ public class TestCrash extends LuceneTestCase { private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException { dir.setLockFactory(NoLockFactory.getNoLockFactory()); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random)) .setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler())); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions(); if (initialCommit) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java b/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java index b1bf5eee9ca..24130205b08 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCrashCausesCorruptIndex.java @@ -65,7 +65,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase { // NOTE: cannot use RandomIndexWriter because it // sometimes commits: IndexWriter indexWriter = new IndexWriter(crashAfterCreateOutput, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); indexWriter.addDocument(getDocument()); // writes segments_1: @@ -96,7 +96,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase { // it doesn't know what to do with the created but empty // segments_2 file IndexWriter indexWriter = new IndexWriter(realDirectory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); // currently the test fails above. // however, to test the fix, the following lines should pass as well. 
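Illustrative sketch (not part of the patch): every hunk above and below applies the same mechanical migration, replacing the deprecated two-argument test helper newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) with the single-argument newIndexWriterConfig(analyzer) overload and reflowing chained setters one per line. The class below is hypothetical (its name and test method are illustrative only) and uses only helpers that already appear in these hunks, to show the post-migration idiom in one self-contained place.

    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class ExampleConfigMigrationTest extends LuceneTestCase {
      public void testVersionlessConfig() throws Exception {
        Directory dir = newDirectory();
        // Before this patch: newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
        // After this patch: the Version argument is dropped; only the analyzer is passed.
        IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
            .setMaxBufferedDocs(2)
            .setMergePolicy(newLogMergePolicy()));
        writer.addDocument(new Document());
        writer.shutdown();
        dir.close();
      }
    }

With the Version parameter gone, the config presumably falls back to the library's current default version behavior rather than an explicit TEST_VERSION_CURRENT, which is the point of the migration.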
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java index dbeed659f64..d0982271146 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCustomNorms.java @@ -45,8 +45,7 @@ public class TestCustomNorms extends LuceneTestCase { MockAnalyzer analyzer = new MockAnalyzer(random()); analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH)); - IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, - analyzer); + IndexWriterConfig config = newIndexWriterConfig(analyzer); Similarity provider = new MySimProvider(); config.setSimilarity(provider); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java index 4e10c14b691..a84105af62d 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java @@ -221,8 +221,7 @@ public class TestDeletionPolicy extends LuceneTestCase { final double SECONDS = 2.0; Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())) + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) .setIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS)); MergePolicy mp = conf.getMergePolicy(); mp.setNoCFSRatio(1.0); @@ -240,9 +239,9 @@ public class TestDeletionPolicy extends LuceneTestCase { // Record last time when writer performed deletes of // past commits lastDeleteTime = System.currentTimeMillis(); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode( - OpenMode.APPEND).setIndexDeletionPolicy(policy); + conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setIndexDeletionPolicy(policy); mp = conf.getMergePolicy(); mp.setNoCFSRatio(1.0); writer = new IndexWriter(dir, conf); @@ -316,8 +315,7 @@ public class TestDeletionPolicy extends LuceneTestCase { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) .setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir)) .setMaxBufferedDocs(10) .setMergeScheduler(new SerialMergeScheduler()); @@ -337,9 +335,9 @@ public class TestDeletionPolicy extends LuceneTestCase { r.close(); } if (needsMerging) { - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode( - OpenMode.APPEND).setIndexDeletionPolicy(policy); + conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setIndexDeletionPolicy(policy); mp = conf.getMergePolicy(); mp.setNoCFSRatio(useCompoundFile ? 
1.0 : 0.0); if (VERBOSE) { @@ -384,10 +382,9 @@ public class TestDeletionPolicy extends LuceneTestCase { // Open & close a writer and assert that it // actually removed something: int preCount = dir.listAll().length; - writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setOpenMode( - OpenMode.APPEND).setIndexDeletionPolicy(policy)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setIndexDeletionPolicy(policy)); writer.shutdown(); int postCount = dir.listAll().length; assertTrue(postCount < preCount); @@ -406,7 +403,7 @@ public class TestDeletionPolicy extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir)). setMaxBufferedDocs(2). setMergePolicy(newLogMergePolicy(10)) @@ -429,7 +426,8 @@ public class TestDeletionPolicy extends LuceneTestCase { assertTrue(lastCommit != null); // Now add 1 doc and merge - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(policy)); addDoc(writer); assertEquals(11, writer.numDocs()); writer.forceMerge(1); @@ -438,8 +436,9 @@ public class TestDeletionPolicy extends LuceneTestCase { assertEquals(6, DirectoryReader.listCommits(dir).size()); // Now open writer on the commit just before merge: - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(policy) + .setIndexCommit(lastCommit)); assertEquals(10, writer.numDocs()); // Should undo our rollback: @@ -451,8 +450,9 @@ public class TestDeletionPolicy extends LuceneTestCase { assertEquals(11, r.numDocs()); r.close(); - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(policy) + .setIndexCommit(lastCommit)); assertEquals(10, writer.numDocs()); // Commits the rollback: writer.shutdown(); @@ -468,7 +468,8 @@ public class TestDeletionPolicy extends LuceneTestCase { r.close(); // Re-merge - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(policy)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(policy)); writer.forceMerge(1); writer.shutdown(); @@ -479,7 +480,8 @@ public class TestDeletionPolicy extends LuceneTestCase { // Now open writer on the commit just before merging, // but this time keeping only the last commit: - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexCommit(lastCommit)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexCommit(lastCommit)); assertEquals(10, writer.numDocs()); // Reader still sees fully merged index, because writer @@ -512,8 +514,7 @@ public class TestDeletionPolicy extends LuceneTestCase { Directory dir = newDirectory(); - 
IndexWriterConfig conf = newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) .setIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy()) .setMaxBufferedDocs(10); @@ -526,8 +527,9 @@ public class TestDeletionPolicy extends LuceneTestCase { } writer.shutdown(); - conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy); + conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setIndexDeletionPolicy(policy); mp = conf.getMergePolicy(); mp.setNoCFSRatio(1.0); writer = new IndexWriter(dir, conf); @@ -563,8 +565,7 @@ public class TestDeletionPolicy extends LuceneTestCase { KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N); for(int j=0;j() {{ put("key", "value"); }}); @@ -2132,7 +2141,8 @@ public class TestIndexWriter extends LuceneTestCase { writer.shutdown(); // validate that it's also visible when opening a new IndexWriter - writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setOpenMode(OpenMode.APPEND)); + writer = new IndexWriter(dir, newIndexWriterConfig(null) + .setOpenMode(OpenMode.APPEND)); assertEquals("value", writer.getCommitData().get("key")); writer.shutdown(); @@ -2141,7 +2151,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testNullAnalyzer() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig iwConf = newIndexWriterConfig(null); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf); // add 3 good docs for (int i = 0; i < 3; i++) { @@ -2273,8 +2283,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testIterableFieldThrowsException() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); int iters = atLeast(100); int docCount = 0; int docId = 0; @@ -2335,8 +2344,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testIterableThrowsException() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); int iters = atLeast(100); int docCount = 0; int docId = 0; @@ -2388,8 +2396,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testIterableThrowsException2() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); try { w.addDocuments(new Iterable() { @Override @@ -2459,7 +2466,7 @@ public class TestIndexWriter extends LuceneTestCase { for(int i=0;i<6;i++) { BaseDirectoryWrapper dir = newDirectory(); dir.createOutput("segments_0", IOContext.DEFAULT).close(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); int mode = i/2; if (mode == 0) { iwc.setOpenMode(OpenMode.CREATE); @@ -2504,7 +2511,7 @@ 
public class TestIndexWriter extends LuceneTestCase { public void testHasUncommittedChanges() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); assertTrue(writer.hasUncommittedChanges()); // this will be true because a commit will create an empty index Document doc = new Document(); doc.add(newTextField("myfield", "a b c", Field.Store.NO)); @@ -2542,7 +2549,7 @@ public class TestIndexWriter extends LuceneTestCase { assertFalse(writer.hasUncommittedChanges()); writer.shutdown(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); assertFalse(writer.hasUncommittedChanges()); writer.addDocument(doc); assertTrue(writer.hasUncommittedChanges()); @@ -2553,7 +2560,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testMergeAllDeleted() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); final SetOnce iwRef = new SetOnce<>(); iwc.setInfoStream(new RandomIndexWriter.TestPointInfoStream(iwc.getInfoStream(), new RandomIndexWriter.TestPoint() { @Override @@ -2607,7 +2614,7 @@ public class TestIndexWriter extends LuceneTestCase { Directory directory = newDirectory(); // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!!1 - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); IndexWriter iwriter = new IndexWriter(directory, iwc); Document doc = new Document(); @@ -2627,7 +2634,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testDoubleClose() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new SortedDocValuesField("dv", new BytesRef("foo!"))); w.addDocument(doc); @@ -2639,7 +2646,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testRollbackThenClose() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new SortedDocValuesField("dv", new BytesRef("foo!"))); w.addDocument(doc); @@ -2651,7 +2658,7 @@ public class TestIndexWriter extends LuceneTestCase { public void testCloseThenRollback() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new SortedDocValuesField("dv", new BytesRef("foo!"))); w.addDocument(doc); @@ -2665,7 +2672,7 @@ public class TestIndexWriter extends LuceneTestCase { Directory dir = newDirectory(); // If version is < 50 IW.close 
should throw an exception // on uncommitted changes: - IndexWriterConfig iwc = newIndexWriterConfig(Version.LUCENE_4_8, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random())); IndexWriter w = new IndexWriter(dir, iwc); Document doc = new Document(); doc.add(new SortedDocValuesField("dv", new BytesRef("foo!"))); @@ -2689,7 +2696,7 @@ public class TestIndexWriter extends LuceneTestCase { // If version is < 50 IW.close should throw an exception // on still-running merges: - IndexWriterConfig iwc = newIndexWriterConfig(Version.LUCENE_4_8, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random())); LogDocMergePolicy mp = new LogDocMergePolicy(); mp.setMergeFactor(2); iwc.setMergePolicy(mp); @@ -2764,7 +2771,7 @@ public class TestIndexWriter extends LuceneTestCase { // Allow writing to same file more than once: dir.setPreventDoubleWrite(false); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); LogMergePolicy lmp = new LogDocMergePolicy(); lmp.setMergeFactor(2); iwc.setMergePolicy(lmp); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java index 06cb0c6931a..b4f4388b9a0 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java @@ -42,7 +42,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { */ public void testCommitOnClose() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for (int i = 0; i < 14; i++) { TestIndexWriter.addDoc(writer); } @@ -57,7 +57,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { reader = DirectoryReader.open(dir); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for(int i=0;i<3;i++) { for(int j=0;j<11;j++) { TestIndexWriter.addDoc(writer); @@ -93,7 +93,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { */ public void testCommitOnCloseAbort() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(10)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(10)); for (int i = 0; i < 14; i++) { TestIndexWriter.addDoc(writer); } @@ -106,8 +107,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { assertEquals("first number of hits", 14, hits.length); reader.close(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setMaxBufferedDocs(10)); for(int j=0;j<17;j++) { TestIndexWriter.addDoc(writer); } @@ -133,8 +135,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { // Now make 
sure we can re-open the index, add docs, // and all is good: - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND) + .setMaxBufferedDocs(10)); // On abort, writer in fact may write to the same // segments_N file: @@ -204,10 +207,10 @@ public class TestIndexWriterCommit extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer). - setMaxBufferedDocs(10). - setReaderPooling(false). - setMergePolicy(newLogMergePolicy(10)) + newIndexWriterConfig(analyzer) + .setMaxBufferedDocs(10) + .setReaderPooling(false) + .setMergePolicy(newLogMergePolicy(10)) ); for(int j=0;j<30;j++) { TestIndexWriter.addDocWithIndex(writer, j); @@ -219,12 +222,12 @@ public class TestIndexWriterCommit extends LuceneTestCase { long startDiskUsage = dir.getMaxUsedSizeInBytes(); writer = new IndexWriter( dir, - newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer) - .setOpenMode(OpenMode.APPEND). - setMaxBufferedDocs(10). - setMergeScheduler(new SerialMergeScheduler()). - setReaderPooling(false). - setMergePolicy(newLogMergePolicy(10)) + newIndexWriterConfig(analyzer) + .setOpenMode(OpenMode.APPEND) + .setMaxBufferedDocs(10) + .setMergeScheduler(new SerialMergeScheduler()) + .setReaderPooling(false) + .setMergePolicy(newLogMergePolicy(10)) ); for(int j=0;j<1470;j++) { @@ -268,16 +271,17 @@ public class TestIndexWriterCommit extends LuceneTestCase { } IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMaxBufferedDocs(10). - setMergePolicy(newLogMergePolicy(10)) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(10) + .setMergePolicy(newLogMergePolicy(10)) ); for(int j=0;j<17;j++) { TestIndexWriter.addDocWithIndex(writer, j); } writer.shutdown(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND)); writer.forceMerge(1); // Open a reader before closing (commiting) the writer: @@ -302,7 +306,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { if (VERBOSE) { System.out.println("TEST: do real full merge"); } - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.APPEND)); writer.forceMerge(1); writer.shutdown(); @@ -327,8 +332,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { final int NUM_THREADS = 5; final double RUN_SEC = 0.5; final Directory dir = newDirectory(); - final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(newLogMergePolicy())); TestUtil.reduceOpenFiles(w.w); w.commit(); final AtomicBoolean failed = new AtomicBoolean(); @@ -383,9 +388,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(random())). - setMaxBufferedDocs(2). - setMergePolicy(newLogMergePolicy(5)) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergePolicy(newLogMergePolicy(5)) ); writer.commit(); @@ -420,7 +425,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { public void testFutureCommit() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); Document doc = new Document(); w.addDocument(doc); @@ -447,7 +453,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { assertNotNull(commit); - w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).setIndexCommit(commit)); + w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE) + .setIndexCommit(commit)); assertEquals(1, w.numDocs()); @@ -476,7 +484,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { // changed since LUCENE-2386, where before IW would always commit on a fresh // new index. Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); try { DirectoryReader.listCommits(dir); fail("listCommits should have thrown an exception over empty index"); @@ -495,9 +503,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMaxBufferedDocs(2). - setMergePolicy(newLogMergePolicy(5)) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergePolicy(newLogMergePolicy(5)) ); writer.commit(); @@ -554,9 +562,9 @@ public class TestIndexWriterCommit extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMaxBufferedDocs(2). 
- setMergePolicy(newLogMergePolicy(5)) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergePolicy(newLogMergePolicy(5)) ); writer.commit(); @@ -580,7 +588,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { reader.close(); reader2.close(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for (int i = 0; i < 17; i++) TestIndexWriter.addDoc(writer); @@ -606,7 +614,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { public void testPrepareCommitNoChanges() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.prepareCommit(); writer.commit(); writer.shutdown(); @@ -620,7 +628,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { // LUCENE-1382 public void testCommitUserData() throws IOException { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); for(int j=0;j<17;j++) TestIndexWriter.addDoc(w); w.shutdown(); @@ -630,7 +639,8 @@ public class TestIndexWriterCommit extends LuceneTestCase { assertEquals(0, r.getIndexCommit().getUserData().size()); r.close(); - w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)); + w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); for(int j=0;j<17;j++) TestIndexWriter.addDoc(w); Map data = new HashMap<>(); @@ -642,7 +652,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { assertEquals("test1", r.getIndexCommit().getUserData().get("label")); r.close(); - w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); w.forceMerge(1); w.shutdown(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java index d1539894c80..3071d3f5cf2 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java @@ -143,7 +143,7 @@ public class TestIndexWriterConfig extends LuceneTestCase { public void testReuse() throws Exception { Directory dir = newDirectory(); // test that IWC cannot be reused across two IWs - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig conf = newIndexWriterConfig(null); new RandomIndexWriter(random(), dir, conf).shutdown(); // this should fail diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java index db6060583ed..9c328fb990a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java @@ -58,8 +58,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { String[] text = { "Amsterdam", "Venice" }; Directory dir 
= newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDeleteTerms(1)); FieldType custom1 = new FieldType(); custom1.setStored(true); @@ -97,8 +97,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { public void testNonRAMDelete() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; int value = 100; @@ -130,8 +130,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { public void testMaxBufferedDeletes() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(1)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDeleteTerms(1)); writer.addDocument(new Document()); writer.deleteDocuments(new Term("foobar", "1")); @@ -149,8 +149,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { System.out.println("TEST: t=" + t); } Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(4) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(4) .setMaxBufferedDeleteTerms(4)); int id = 0; int value = 100; @@ -187,8 +187,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { // test when delete terms apply to both disk and ram segments public void testBothDeletes() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(100) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(100) .setMaxBufferedDeleteTerms(100)); int id = 0; @@ -221,8 +221,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { // test that batched delete terms are flushed together public void testBatchDeletes() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -264,8 +264,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { // test deleteAll() public void testDeleteAll() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, 
newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -373,8 +373,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { // test rollback of deleteAll() public void testDeleteAllRollback() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -409,8 +409,8 @@ public class TestIndexWriterDelete extends LuceneTestCase { // test deleteAll() w/ near real-time reader public void testDeleteAllNRT() throws IOException { Directory dir = newDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDocs(2) + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDocs(2) .setMaxBufferedDeleteTerms(2)); int id = 0; @@ -499,7 +499,7 @@ public class TestIndexWriterDelete extends LuceneTestCase { MockDirectoryWrapper startDir = newMockDirectory(); // TODO: find the resource leak that only occurs sometimes here. startDir.setNoDeleteOpenFile(false); - IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + IndexWriter writer = new IndexWriter(startDir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); for (int i = 0; i < 157; i++) { Document d = new Document(); d.add(newStringField("id", Integer.toString(i), Field.Store.YES)); @@ -525,8 +525,7 @@ public class TestIndexWriterDelete extends LuceneTestCase { dir.setPreventDoubleWrite(false); dir.setAllowRandomFileNotFoundException(false); IndexWriter modifier = new IndexWriter(dir, - newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) .setMaxBufferedDocs(1000) .setMaxBufferedDeleteTerms(1000) .setMergeScheduler(new ConcurrentMergeScheduler())); @@ -766,8 +765,10 @@ public class TestIndexWriterDelete extends LuceneTestCase { String[] text = { "Amsterdam", "Venice" }; MockDirectoryWrapper dir = newMockDirectory(); - IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMaxBufferedDeleteTerms(2).setReaderPooling(false).setMergePolicy(newLogMergePolicy())); + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + .setMaxBufferedDeleteTerms(2) + .setReaderPooling(false) + .setMergePolicy(newLogMergePolicy())); MergePolicy lmp = modifier.getConfig().getMergePolicy(); lmp.setNoCFSRatio(1.0); @@ -891,7 +892,7 @@ public class TestIndexWriterDelete extends LuceneTestCase { String[] text = { "Amsterdam", "Venice" }; MockDirectoryWrapper dir = newMockDirectory(); - IndexWriter 
modifier = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + IndexWriter modifier = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); modifier.commit(); dir.failOn(failure.reset()); @@ -982,7 +983,10 @@ public class TestIndexWriterDelete extends LuceneTestCase { return new TokenStreamComponents(new MockTokenizer(MockTokenizer.WHITESPACE, true)); } }; - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setRAMBufferSizeMB(1.0).setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer) + .setRAMBufferSizeMB(1.0) + .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH)); Document doc = new Document(); doc.add(newTextField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO)); int num = atLeast(3); @@ -1023,8 +1027,11 @@ public class TestIndexWriterDelete extends LuceneTestCase { // ever call commit() for this test: // note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999) IndexWriter w = new IndexWriter(dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setRAMBufferSizeMB(0.1f).setMaxBufferedDocs(1000).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false)); + newIndexWriterConfig(new MockAnalyzer(random())) + .setRAMBufferSizeMB(0.1f) + .setMaxBufferedDocs(1000) + .setMergePolicy(NoMergePolicy.INSTANCE) + .setReaderPooling(false)); int count = 0; while(true) { Document doc = new Document(); @@ -1069,8 +1076,12 @@ public class TestIndexWriterDelete extends LuceneTestCase { // ever call commit() for this test: final int flushAtDelCount = atLeast(1020); IndexWriter w = new IndexWriter(dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMaxBufferedDeleteTerms(flushAtDelCount).setMaxBufferedDocs(1000).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false)); + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDeleteTerms(flushAtDelCount) + .setMaxBufferedDocs(1000) + .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setMergePolicy(NoMergePolicy.INSTANCE) + .setReaderPooling(false)); int count = 0; while(true) { Document doc = new Document(); @@ -1110,8 +1121,11 @@ public class TestIndexWriterDelete extends LuceneTestCase { final AtomicBoolean closing = new AtomicBoolean(); final AtomicBoolean sawAfterFlush = new AtomicBoolean(); IndexWriter w = new IndexWriter(dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). 
- setRAMBufferSizeMB(0.5).setMaxBufferedDocs(-1).setMergePolicy(NoMergePolicy.INSTANCE).setReaderPooling(false)) { + newIndexWriterConfig(new MockAnalyzer(random())) + .setRAMBufferSizeMB(0.5) + .setMaxBufferedDocs(-1) + .setMergePolicy(NoMergePolicy.INSTANCE) + .setReaderPooling(false)) { @Override public void doAfterFlush() { assertTrue("only " + docsInSegment.get() + " in segment", closing.get() || docsInSegment.get() >= 7); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java index e0e33358ede..d089d65aa58 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java @@ -248,8 +248,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase { MockAnalyzer analyzer = new MockAnalyzer(random()); analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases. - IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer) - .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1()); + IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(analyzer) + .setRAMBufferSizeMB(0.1) + .setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1()); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions(); //writer.setMaxBufferedDocs(10); if (VERBOSE) { @@ -291,8 +292,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase { Directory dir = newDirectory(); MockAnalyzer analyzer = new MockAnalyzer(random()); analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases. 
- IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer) - .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1()); + IndexWriter writer = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(analyzer) + .setRAMBufferSizeMB(0.2) + .setMergeScheduler(new ConcurrentMergeScheduler()), new TestPoint1()); ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions(); //writer.setMaxBufferedDocs(10); writer.commit(); @@ -372,7 +374,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void testExceptionDocumentsWriterInit() throws IOException { Directory dir = newDirectory(); TestPoint2 testPoint = new TestPoint2(); - IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint); + IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint); Document doc = new Document(); doc.add(newTextField("field", "a field", Field.Store.YES)); w.addDocument(doc); @@ -390,7 +392,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase { // LUCENE-1208 public void testExceptionJustBeforeFlush() throws IOException { Directory dir = newDirectory(); - IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2), new TestPoint1()); + IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2), + new TestPoint1()); Document doc = new Document(); doc.add(newTextField("field", "a field", Field.Store.YES)); w.addDocument(doc); @@ -433,8 +438,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase { // LUCENE-1210 public void testExceptionOnMergeInit() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergePolicy(newLogMergePolicy()); ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler(); cms.setSuppressExceptions(); conf.setMergeScheduler(cms); @@ -460,7 +466,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { // LUCENE-1072 public void testExceptionFromTokenStream() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new Analyzer() { + IndexWriterConfig conf = newIndexWriterConfig(new Analyzer() { @Override public TokenStreamComponents createComponents(String fieldName) { @@ -578,7 +584,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { failure.setDoFail(); dir.failOn(failure); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2)); Document doc = new Document(); String contents = "aa bb cc dd ee ff gg hh ii jj kk"; doc.add(newTextField("content", contents, Field.Store.NO)); @@ -615,7 +622,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { System.out.println("TEST: cycle i=" + i); } Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, 
newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer) + .setMergePolicy(newLogMergePolicy())); // don't allow a sudden merge to clean up the deleted // doc below: @@ -669,8 +677,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { } reader.close(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - analyzer).setMaxBufferedDocs(10)); + writer = new IndexWriter(dir, newIndexWriterConfig(analyzer) + .setMaxBufferedDocs(10)); doc = new Document(); doc.add(newField("contents", "here are some contents", DocCopyIterator.custom5)); for(int j=0;j<17;j++) @@ -712,8 +720,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { Directory dir = newDirectory(); { - final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(-1) + final IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer) + .setMaxBufferedDocs(-1) .setMergePolicy(NoMergePolicy.INSTANCE)); // don't use a merge policy here they depend on the DWPThreadPool and its max thread states etc. final int finalI = i; @@ -781,8 +789,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { assertEquals(NUM_THREAD*NUM_ITER, numDel); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer) + .setMaxBufferedDocs(10)); Document doc = new Document(); doc.add(newField("contents", "here are some contents", DocCopyIterator.custom5)); for(int j=0;j<17;j++) @@ -843,10 +851,10 @@ public class TestIndexWriterExceptions extends LuceneTestCase { IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMaxBufferedDocs(2). - setMergeScheduler(new ConcurrentMergeScheduler()). 
- setMergePolicy(newLogMergePolicy(5)) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergeScheduler(new ConcurrentMergeScheduler()) + .setMergePolicy(newLogMergePolicy(5)) ); failure.setDoFail(); @@ -931,8 +939,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { for (FailOnlyInCommit failure : failures) { MockDirectoryWrapper dir = newMockDirectory(); dir.setFailOnCreateOutput(false); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(newTextField("field", "a field", Field.Store.YES)); w.addDocument(doc); @@ -955,7 +962,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void testForceMergeExceptions() throws IOException { Directory startDir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setMergePolicy(newLogMergePolicy()); ((LogMergePolicy) conf.getMergePolicy()).setMergeFactor(100); IndexWriter w = new IndexWriter(startDir, conf); for(int i=0;i<27;i++) @@ -968,7 +977,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { System.out.println("TEST: iter " + i); } MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new RAMDirectory(startDir, newIOContext(random()))); - conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()); + conf = newIndexWriterConfig(new MockAnalyzer(random())) + .setMergeScheduler(new ConcurrentMergeScheduler()); ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions(); w = new IndexWriter(dir, conf); dir.setRandomIOExceptionRate(0.5); @@ -991,7 +1001,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { final AtomicBoolean thrown = new AtomicBoolean(false); final Directory dir = newDirectory(); final IndexWriter writer = new IndexWriter(dir, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setInfoStream(new InfoStream() { + newIndexWriterConfig(new MockAnalyzer(random())) + .setInfoStream(new InfoStream() { @Override public void message(String component, final String message) { if (message.startsWith("now flush at shutdown") && thrown.compareAndSet(false, true)) { @@ -1035,7 +1046,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void testRollbackExceptionHang() throws Throwable { Directory dir = newDirectory(); TestPoint4 testPoint = new TestPoint4(); - IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())), testPoint); + IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())), testPoint); addDoc(w); @@ -1058,7 +1069,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { IndexWriter writer = null; - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents for (int i = 0; i < 100; i++) { @@ -1090,7 +1101,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { reader.close(); // should remove the 
corrumpted segments_N - new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)).shutdown(); + new IndexWriter(dir, newIndexWriterConfig(null)).shutdown(); dir.close(); } @@ -1103,7 +1114,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { IndexWriter writer = null; - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents for (int i = 0; i < 100; i++) { @@ -1152,8 +1163,9 @@ public class TestIndexWriterExceptions extends LuceneTestCase { writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). - setMergePolicy(newLogMergePolicy(true)).setUseCompoundFile(true) + newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(newLogMergePolicy(true)) + .setUseCompoundFile(true) ); MergePolicy lmp = writer.getConfig().getMergePolicy(); // Force creation of CFS: @@ -1206,7 +1218,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { IndexWriter writer = null; - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); // add 100 documents for (int i = 0; i < 100; i++) { @@ -1244,7 +1256,8 @@ public class TestIndexWriterExceptions extends LuceneTestCase { reader.close(); try { - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE)); } catch (Exception e) { e.printStackTrace(System.out); fail("writer failed to open on a crashed index"); @@ -1268,8 +1281,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { for (int j = 0; j < num; j++) { for (FailOnTermVectors failure : failures) { MockDirectoryWrapper dir = newMockDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); dir.failOn(failure); int numDocs = 10 + random().nextInt(30); for (int i = 0; i < numDocs; i++) { @@ -1695,12 +1707,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void testExceptionOnCtor() throws Exception { UOEDirectory uoe = new UOEDirectory(); Directory d = new MockDirectoryWrapper(random(), uoe); - IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(null)); iw.addDocument(new Document()); iw.shutdown(); uoe.doFail = true; try { - new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + new IndexWriter(d, newIndexWriterConfig(null)); fail("should have gotten a UOE"); } catch (UnsupportedOperationException expected) { } @@ -1711,7 +1723,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void testIllegalPositions() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); Token t1 = new Token("foo", 0, 3); t1.setPositionIncrement(Integer.MAX_VALUE); @@ -1734,7 +1746,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { public void 
testLegalbutVeryLargePositions() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); Token t1 = new Token("foo", 0, 3); t1.setPositionIncrement(Integer.MAX_VALUE-500); @@ -1940,7 +1952,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase { System.out.println("\nTEST: iter=" + iter + " numDocs=" + numDocs + " docBase=" + docBase + " delCount=" + deleteCount); } if (w == null) { - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); final MergeScheduler ms = iwc.getMergeScheduler(); if (ms instanceof ConcurrentMergeScheduler) { final ConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeScheduler() { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java index 9b337a5260b..5eaed563124 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions2.java @@ -90,7 +90,7 @@ public class TestIndexWriterExceptions2 extends LuceneTestCase { Codec inner = RANDOM_MULTIPLIER > 1 ? Codec.getDefault() : new AssertingCodec(); Codec codec = new CrankyCodec(inner, new Random(random().nextLong())); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); // just for now, try to keep this test reproducible conf.setMergeScheduler(new SerialMergeScheduler()); conf.setCodec(codec); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java index d2b123797b8..16108787235 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterForceMerge.java @@ -40,10 +40,10 @@ public class TestIndexWriterForceMerge extends LuceneTestCase { LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.setMinMergeDocs(1); ldmp.setMergeFactor(5); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2).setMergePolicy( - ldmp)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE) + .setMaxBufferedDocs(2) + .setMergePolicy(ldmp)); for(int j=0;j docs = new HashMap<>(); - IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB( - 0.1).setMaxBufferedDocs(maxBufferedDocs).setMergePolicy(newLogMergePolicy()), new YieldTestPoint()); + IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE) + .setRAMBufferSizeMB(0.1) + .setMaxBufferedDocs(maxBufferedDocs) + .setMergePolicy(newLogMergePolicy()), new YieldTestPoint()); w.commit(); LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy(); lmp.setNoCFSRatio(0.0); @@ -197,10 +199,13 @@ public class TestStressIndexing2 extends LuceneTestCase { public Map indexRandom(int nThreads, int 
iterations, int range, Directory dir, int maxThreadStates, boolean doReaderPooling) throws IOException, InterruptedException { Map docs = new HashMap<>(); - IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE) - .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs).setIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates)) - .setReaderPooling(doReaderPooling).setMergePolicy(newLogMergePolicy()), new YieldTestPoint()); + IndexWriter w = RandomIndexWriter.mockIndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setOpenMode(OpenMode.CREATE) + .setRAMBufferSizeMB(0.1) + .setMaxBufferedDocs(maxBufferedDocs) + .setIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates)) + .setReaderPooling(doReaderPooling) + .setMergePolicy(newLogMergePolicy()), new YieldTestPoint()); LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy(); lmp.setNoCFSRatio(0.0); lmp.setMergeFactor(mergeFactor); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java b/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java index ba7e3f5c08f..2c6360d6bb1 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestStressNRT.java @@ -106,7 +106,7 @@ public class TestStressNRT extends LuceneTestCase { Directory dir = newDirectory(); - final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.setDoRandomForceMergeAssert(false); writer.commit(); reader = DirectoryReader.open(dir); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java index 944b15e9dd8..75003b59118 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java @@ -91,9 +91,10 @@ public class TestTermVectorsReader extends LuceneTestCase { dir = newDirectory(); IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()). + newIndexWriterConfig(new MyAnalyzer()). setMaxBufferedDocs(-1). 
- setMergePolicy(newLogMergePolicy(false, 10)).setUseCompoundFile(false) + setMergePolicy(newLogMergePolicy(false, 10)) + .setUseCompoundFile(false) ); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java index 4de4c908be4..5523f9ed630 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsWriter.java @@ -43,8 +43,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1442 public void testDoubleOffsetCounting() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(StringField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -103,7 +102,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1442 public void testDoubleOffsetCounting2() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -138,7 +137,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1448 public void testEndOffsetPositionCharAnalyzer() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -174,7 +173,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { public void testEndOffsetPositionWithCachingTokenFilter() throws Exception { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random()); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer)); Document doc = new Document(); try (TokenStream stream = analyzer.tokenStream("field", "abcd ")) { stream.reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct? 
@@ -213,8 +212,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1448 public void testEndOffsetPositionStopFilter() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -249,8 +247,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1448 public void testEndOffsetPositionStandard() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -294,8 +291,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1448 public void testEndOffsetPositionStandardEmptyField() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -333,8 +329,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1448 public void testEndOffsetPositionStandardEmptyField2() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); @@ -379,12 +374,11 @@ public class TestTermVectorsWriter extends LuceneTestCase { Directory dir = newDirectory(); for(int iter=0;iter<2;iter++) { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMaxBufferedDocs(2).setRAMBufferSizeMB( - IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler( - new SerialMergeScheduler()).setMergePolicy( - new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setMergeScheduler(new SerialMergeScheduler()) + .setMergePolicy(new LogDocMergePolicy())); Document document = new Document(); FieldType customType = new FieldType(); @@ -415,11 +409,11 @@ public class TestTermVectorsWriter extends LuceneTestCase { } reader.close(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMaxBufferedDocs(2) + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) - .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy( - new LogDocMergePolicy())); + 
.setMergeScheduler(new SerialMergeScheduler()) + .setMergePolicy(new LogDocMergePolicy())); Directory[] indexDirs = {new MockDirectoryWrapper(random(), new RAMDirectory(dir, newIOContext(random())))}; writer.addIndexes(indexDirs); @@ -433,12 +427,11 @@ public class TestTermVectorsWriter extends LuceneTestCase { public void testTermVectorCorruption2() throws IOException { Directory dir = newDirectory(); for(int iter=0;iter<2;iter++) { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMaxBufferedDocs(2).setRAMBufferSizeMB( - IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler( - new SerialMergeScheduler()).setMergePolicy( - new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setMergeScheduler(new SerialMergeScheduler()) + .setMergePolicy(new LogDocMergePolicy())); Document document = new Document(); @@ -474,11 +467,11 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1168 public void testTermVectorCorruption3() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setMaxBufferedDocs(2).setRAMBufferSizeMB( - IndexWriterConfig.DISABLE_AUTO_FLUSH).setMergeScheduler( - new SerialMergeScheduler()).setMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) + .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .setMergeScheduler(new SerialMergeScheduler()) + .setMergePolicy(new LogDocMergePolicy())); Document document = new Document(); FieldType customType = new FieldType(); @@ -496,11 +489,11 @@ public class TestTermVectorsWriter extends LuceneTestCase { writer.addDocument(document); writer.shutdown(); - writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMaxBufferedDocs(2) + writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMaxBufferedDocs(2) .setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) - .setMergeScheduler(new SerialMergeScheduler()).setMergePolicy( - new LogDocMergePolicy())); + .setMergeScheduler(new SerialMergeScheduler()) + .setMergePolicy(new LogDocMergePolicy())); for(int i=0;i<6;i++) writer.addDocument(document); @@ -519,8 +512,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1008 public void testNoTermVectorAfterTermVector() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document document = new Document(); FieldType customType2 = new FieldType(StringField.TYPE_NOT_STORED); customType2.setStoreTermVectors(true); @@ -550,8 +542,7 @@ public class TestTermVectorsWriter extends LuceneTestCase { // LUCENE-1010 public void testNoTermVectorAfterTermVectorMerge() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document document = new Document(); FieldType customType = new 
FieldType(StringField.TYPE_NOT_STORED); customType.setStoreTermVectors(true); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermdocPerf.java b/lucene/core/src/test/org/apache/lucene/index/TestTermdocPerf.java index 9eae28e05eb..0218b692530 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermdocPerf.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermdocPerf.java @@ -89,10 +89,10 @@ public class TestTermdocPerf extends LuceneTestCase { doc.add(newStringField(field, val, Field.Store.NO)); IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). - setOpenMode(OpenMode.CREATE). - setMaxBufferedDocs(100). - setMergePolicy(newLogMergePolicy(100)) + newIndexWriterConfig(analyzer) + .setOpenMode(OpenMode.CREATE) + .setMaxBufferedDocs(100) + .setMergePolicy(newLogMergePolicy(100)) ); for (int i=0; i data = new HashMap<>(); data.put("index", "Rolled back to 1-"+id); w.setCommitData(data); @@ -131,7 +131,8 @@ public class TestTransactionRollback extends LuceneTestCase { //Build index, of records 1 to 100, committing after each batch of 10 IndexDeletionPolicy sdp=new KeepAllDeletionPolicy(); - IndexWriter w=new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setIndexDeletionPolicy(sdp)); + IndexWriter w=new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setIndexDeletionPolicy(sdp)); for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) { Document doc=new Document(); @@ -212,7 +213,7 @@ public class TestTransactionRollback extends LuceneTestCase { for(int i=0;i<2;i++) { // Unless you specify a prior commit point, rollback // should not work: - new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) + new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())) .setIndexDeletionPolicy(new DeleteLastCommitPolicy())).shutdown(); IndexReader r = DirectoryReader.open(dir); assertEquals(100, r.numDocs()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java b/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java index a0d7dbe4c4d..b976d7cd602 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTransactions.java @@ -100,7 +100,7 @@ public class TestTransactions extends LuceneTestCase { IndexWriter writer1 = new IndexWriter( dir1, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setMaxBufferedDocs(3). setMergeScheduler(new ConcurrentMergeScheduler()). setMergePolicy(newLogMergePolicy(2)) @@ -111,7 +111,7 @@ public class TestTransactions extends LuceneTestCase { // happen @ different times IndexWriter writer2 = new IndexWriter( dir2, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setMaxBufferedDocs(2). setMergeScheduler(new ConcurrentMergeScheduler()). 
setMergePolicy(newLogMergePolicy(3)) @@ -212,7 +212,7 @@ public class TestTransactions extends LuceneTestCase { } public void initIndex(Directory dir) throws Throwable { - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for(int j=0; j<7; j++) { Document d = new Document(); int n = random().nextInt(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java b/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java index ef774e9b911..546bbd7f5c7 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestUniqueTermCount.java @@ -46,7 +46,7 @@ public class TestUniqueTermCount extends LuceneTestCase { super.setUp(); dir = newDirectory(); MockAnalyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig config = newIndexWriterConfig(analyzer); config.setMergePolicy(newLogMergePolicy()); config.setSimilarity(new TestSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); diff --git a/lucene/core/src/test/org/apache/lucene/search/FuzzyTermOnShortTermsTest.java b/lucene/core/src/test/org/apache/lucene/search/FuzzyTermOnShortTermsTest.java index 7f4d672daf5..1c51befb49c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/FuzzyTermOnShortTermsTest.java +++ b/lucene/core/src/test/org/apache/lucene/search/FuzzyTermOnShortTermsTest.java @@ -83,7 +83,7 @@ public class FuzzyTermOnShortTermsTest extends LuceneTestCase { public static Directory getDirectory(Analyzer analyzer, String[] vals) throws IOException{ Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + newIndexWriterConfig(analyzer) .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)).setMergePolicy(newLogMergePolicy())); for (String s : vals){ diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java index 4c563c055f8..ce50204c3fe 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java @@ -58,7 +58,7 @@ public class TestBoolean2 extends LuceneTestCase { @BeforeClass public static void beforeClass() throws Exception { directory = newDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); doc.add(newTextField(field, docFields[i], Field.Store.NO)); @@ -92,7 +92,7 @@ public class TestBoolean2 extends LuceneTestCase { } while(docCount < 3000); RandomIndexWriter w = new RandomIndexWriter(random(), dir2, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); Document doc = new Document(); doc.add(newTextField("field2", "xxx", Field.Store.NO)); 
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java index 795070349b2..e173f4906e2 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java @@ -161,7 +161,7 @@ public class TestBooleanOr extends LuceneTestCase { public void testBooleanScorerMax() throws IOException { Directory dir = newDirectory(); - RandomIndexWriter riw = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + RandomIndexWriter riw = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random()))); int docCount = atLeast(10000); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java index a561c16b895..93f3bff633c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java @@ -56,7 +56,7 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase { super.setUp(); analyzer = new MockAnalyzer(random()); dir = newDirectory(); - IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig config = newIndexWriterConfig(analyzer); config.setMergePolicy(newLogMergePolicy()); // we will use docids to validate RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); writer.addDocument(doc("lucene", "lucene is a very popular search engine library")); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java index ee2b1df12ef..9841d967b6a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java @@ -277,7 +277,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { RandomIndexWriter writer = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setMergeScheduler(new SerialMergeScheduler()). 
// asserts below requires no unexpected merges: setMergePolicy(newLogMergePolicy(10)) diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java b/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java index 00e4af8a73a..4c0520bb927 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestConjunctions.java @@ -50,7 +50,7 @@ public class TestConjunctions extends LuceneTestCase { super.setUp(); analyzer = new MockAnalyzer(random()); dir = newDirectory(); - IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig config = newIndexWriterConfig(analyzer); config.setMergePolicy(newLogMergePolicy()); // we will use docids to validate RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); writer.addDocument(doc("lucene", "lucene is a very popular search engine library")); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java index 7c9003b5d16..f9ada23a5ba 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestControlledRealTimeReopenThread.java @@ -303,7 +303,7 @@ public class TestControlledRealTimeReopenThread extends ThreadedIndexingAndSearc * LUCENE-3528 - NRTManager hangs in certain situations */ public void testThreadStarvationNoDeleteNRTReader() throws IOException, InterruptedException { - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMergePolicy(NoMergePolicy.INSTANCE); Directory d = newDirectory(); final CountDownLatch latch = new CountDownLatch(1); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java index 7b926051966..d19eb761e7a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java @@ -101,8 +101,8 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { index = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), index, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())) - .setSimilarity(sim).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())) + .setSimilarity(sim).setMergePolicy(newLogMergePolicy())); // hed is the most important field, dek is secondary diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java index a1137e20f3c..fd280ab279c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDocBoost.java @@ -36,7 +36,7 @@ public class TestDocBoost extends LuceneTestCase { public void testDocBoost() throws Exception { Directory store = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), store, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = new RandomIndexWriter(random(), store, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); 
Field f1 = newTextField("field", "word", Field.Store.YES); Field f2 = newTextField("field", "word", Field.Store.YES); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRangeFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRangeFilter.java index 4d38f81979f..b817b4f3f63 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRangeFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRangeFilter.java @@ -54,7 +54,7 @@ public class TestDocTermOrdsRangeFilter extends LuceneTestCase { dir = newDirectory(); fieldName = random().nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); List terms = new ArrayList<>(); int num = atLeast(200); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRewriteMethod.java b/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRewriteMethod.java index 9dd845fdf7f..a4d25dbdb9d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRewriteMethod.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDocTermOrdsRewriteMethod.java @@ -56,7 +56,7 @@ public class TestDocTermOrdsRewriteMethod extends LuceneTestCase { dir = newDirectory(); fieldName = random().nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); List terms = new ArrayList<>(); int num = atLeast(200); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java index 0580da56b31..736aebe6cf2 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java @@ -40,7 +40,7 @@ public class TestElevationComparator extends LuceneTestCase { Directory directory = newDirectory(); IndexWriter writer = new IndexWriter( directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setMaxBufferedDocs(2). setMergePolicy(newLogMergePolicy(1000)). 
setSimilarity(new DefaultSimilarity()) diff --git a/lucene/core/src/test/org/apache/lucene/search/TestExplanations.java b/lucene/core/src/test/org/apache/lucene/search/TestExplanations.java index 35ddf0c2d58..41bf085fa7e 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestExplanations.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestExplanations.java @@ -71,7 +71,7 @@ public class TestExplanations extends LuceneTestCase { @BeforeClass public static void beforeClassTestExplanations() throws Exception { directory = newDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); for (int i = 0; i < docFields.length; i++) { Document doc = new Document(); doc.add(newStringField(KEY, ""+i, Field.Store.NO)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java index e22ab7d1399..3a2094f3e2d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheRangeFilter.java @@ -436,7 +436,7 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter { @Test public void testSparseIndex() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); for (int d = -20; d <= 20; d++) { Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFieldValueFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestFieldValueFilter.java index 37d36755324..a38bfac5732 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFieldValueFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFieldValueFilter.java @@ -40,7 +40,7 @@ public class TestFieldValueFilter extends LuceneTestCase { public void testFieldValueFilterNoValue() throws IOException { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); int docs = atLeast(10); int[] docStates = buildIndex(writer, docs); int numDocsNoValue = 0; @@ -68,7 +68,7 @@ public class TestFieldValueFilter extends LuceneTestCase { public void testFieldValueFilter() throws IOException { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); int docs = atLeast(10); int[] docStates = buildIndex(writer, docs); int numDocsWithValue = 0; diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java index 77cf62d8c43..b4c507803ef 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java @@ -59,7 +59,7 @@ public class TestFilteredQuery extends LuceneTestCase { 
public void setUp() throws Exception { super.setUp(); directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter (random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = new RandomIndexWriter (random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); doc.add (newTextField("field", "one two three four five", Field.Store.YES)); @@ -402,7 +402,7 @@ public class TestFilteredQuery extends LuceneTestCase { public void testQueryFirstFilterStrategy() throws IOException { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); int numDocs = atLeast(50); int totalDocsWithZero = 0; for (int i = 0; i < numDocs; i++) { @@ -486,7 +486,7 @@ public class TestFilteredQuery extends LuceneTestCase { */ public void testLeapFrogStrategy() throws IOException { Directory directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter (random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + RandomIndexWriter writer = new RandomIndexWriter (random(), directory, newIndexWriterConfig(new MockAnalyzer(random()))); int numDocs = atLeast(50); int totalDocsWithZero = 0; for (int i = 0; i < numDocs; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java b/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java index 11ca424a5cd..c758785938b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java @@ -47,11 +47,11 @@ public class TestFilteredSearch extends LuceneTestCase { Directory directory = newDirectory(); int[] filterBits = {1, 36}; SimpleDocIdSetFilter filter = new SimpleDocIdSetFilter(filterBits); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); searchFiltered(writer, directory, filter, enforceSingleSegment); // run the test on more than one segment enforceSingleSegment = false; - writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy())); + writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergePolicy(newLogMergePolicy())); // we index 60 docs - this will create 6 segments searchFiltered(writer, directory, filter, enforceSingleSegment); directory.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java b/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java index de7586ebaf7..b4d1201d33b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestLiveFieldValues.java @@ -45,7 +45,7 @@ public class TestLiveFieldValues extends LuceneTestCase { public void test() throws Exception { Directory dir = 
newFSDirectory(createTempDir("livefieldupdates")); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); final IndexWriter w = new IndexWriter(dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java index 08fed8bf762..7fac7bc519c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java @@ -45,8 +45,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase { public void testQuery() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy())); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy())); addDoc("one", iw, 1f); addDoc("two", iw, 20f); addDoc("three four", iw, 300f); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java index 906d7b79140..5feccd7831f 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java @@ -467,7 +467,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase { private void doTestZeroPosIncrSloppy(Query q, int nExpected) throws IOException { Directory dir = newDirectory(); // random dir - IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig cfg = newIndexWriterConfig(null); IndexWriter writer = new IndexWriter(dir, cfg); Document doc = new Document(); doc.add(new TextField("field", new CannedTokenStream(INCR_0_DOC_TOKENS))); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java index 455fde92a41..1ebec188906 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java @@ -57,7 +57,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { small = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), small, - newIndexWriterConfig(TEST_VERSION_CURRENT, + newIndexWriterConfig( new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)).setMergePolicy(newLogMergePolicy())); FieldType customType = new FieldType(TextField.TYPE_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java index b1e032ab958..c24d86cf4f4 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiThreadTermVectors.java @@ -40,7 +40,7 @@ public class TestMultiThreadTermVectors extends LuceneTestCase { public void setUp() throws Exception { super.setUp(); directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new 
MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); //writer.setNoCFSRatio(0.0); //writer.infoStream = System.out; FieldType customType = new FieldType(TextField.TYPE_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java index 082afedd58c..3f9db8edb29 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiValuedNumericRangeQuery.java @@ -42,7 +42,7 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase { public void testMultiValuedNRQ() throws Exception { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); DecimalFormat format = new DecimalFormat("00000000000", new DecimalFormatSymbols(Locale.ROOT)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java index 20d6e330b47..5bea673766a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java @@ -60,7 +60,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { distance = (1 << 30) / noDocs; directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) .setMergePolicy(newLogMergePolicy())); @@ -300,7 +300,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { public void testInfiniteValues() throws Exception { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new FloatField("float", Float.NEGATIVE_INFINITY, Field.Store.NO)); doc.add(new IntField("int", Integer.MIN_VALUE, Field.Store.NO)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java index 21b342ad74e..7ffd89dc8f5 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java @@ -60,7 +60,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { distance = (1L << 60) / noDocs; directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) .setMergePolicy(newLogMergePolicy())); @@ -327,7 +327,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { public void testInfiniteValues() throws Exception { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + 
newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(new DoubleField("double", Double.NEGATIVE_INFINITY, Field.Store.NO)); doc.add(new LongField("long", Long.MIN_VALUE, Field.Store.NO)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java index dd48f0435c0..4d2f00968a9 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java @@ -215,7 +215,7 @@ public class TestPhraseQuery extends LuceneTestCase { Directory directory = newDirectory(); Analyzer stopAnalyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, stopAnalyzer)); + newIndexWriterConfig(stopAnalyzer)); Document doc = new Document(); doc.add(newTextField("field", "the stop words are here", Field.Store.YES)); writer.addDocument(doc); @@ -274,7 +274,7 @@ public class TestPhraseQuery extends LuceneTestCase { reader.close(); writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); + newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); doc = new Document(); doc.add(newTextField("contents", "map entry woo", Field.Store.YES)); writer.addDocument(doc); @@ -324,7 +324,7 @@ public class TestPhraseQuery extends LuceneTestCase { public void testSlopScoring() throws IOException { Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMergePolicy(newLogMergePolicy()) .setSimilarity(new DefaultSimilarity())); @@ -589,7 +589,7 @@ public class TestPhraseQuery extends LuceneTestCase { Directory dir = newDirectory(); Analyzer analyzer = new MockAnalyzer(random()); - RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(analyzer).setMergePolicy(newLogMergePolicy())); List> docs = new ArrayList<>(); Document d = new Document(); Field f = newTextField("f", "", Field.Store.NO); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java b/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java index ead4bf0684d..1fc52c02787 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPrefixRandom.java @@ -51,7 +51,7 @@ public class TestPrefixRandom extends LuceneTestCase { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom.java b/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom.java index 45c78a87072..85623e3e8c3 100644 --- 
a/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom.java @@ -49,7 +49,7 @@ public class TestRegexpRandom extends LuceneTestCase { super.setUp(); dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java b/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java index ead284b9022..c0fc8423246 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestRegexpRandom2.java @@ -62,7 +62,7 @@ public class TestRegexpRandom2 extends LuceneTestCase { dir = newDirectory(); fieldName = random().nextBoolean() ? "field" : ""; // sometimes use an empty string as field name RandomIndexWriter writer = new RandomIndexWriter(random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000))); Document doc = new Document(); Field field = newStringField(fieldName, "", Field.Store.NO); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java index 2e96a6b6eb4..f0247de0fac 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestScorerPerf.java @@ -49,7 +49,7 @@ public class TestScorerPerf extends LuceneTestCase { // Create a dummy index with nothing in it. // This could possibly fail if Lucene starts checking for docid ranges... 
d = newDirectory(); - IndexWriter iw = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))); iw.addDocument(new Document()); iw.shutdown(); r = DirectoryReader.open(d); @@ -65,7 +65,7 @@ public class TestScorerPerf extends LuceneTestCase { terms[i] = new Term("f",Character.toString((char)('A'+i))); } - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE)); for (int i=0; i 0; --i) { Document doc = new Document(); @@ -578,7 +578,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); Document doc = new Document(); @@ -607,7 +607,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); for (int i = 0; i < 10; i++) { Document doc = new Document(); @@ -650,7 +650,7 @@ public class TestTaxonomyFacetCounts extends FacetTestCase { Directory indexDir = newDirectory(); Directory taxoDir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges IndexWriter indexWriter = new IndexWriter(indexDir, iwc); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java index 750902e70c8..a736c9d9f7d 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts2.java @@ -241,7 +241,7 @@ public class TestTaxonomyFacetCounts2 extends FacetTestCase { // 3. Segment w/ categories and results // 4. 
Segment w/ categories, but only some results - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMergePolicy(NoMergePolicy.INSTANCE); // prevent merges, so we can control the index segments IndexWriter indexWriter = new IndexWriter(indexDir, conf); TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java index a78a4140049..c3d8c7f6f37 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetSumValueSource.java @@ -252,7 +252,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); @@ -288,7 +288,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); for (int i = 0; i < 4; i++) { Document doc = new Document(); @@ -314,7 +314,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); for (int i = 0; i < 4; i++) { @@ -366,7 +366,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); config.setHierarchical("a", true); //config.setRequireDimCount("a", true); @@ -397,7 +397,7 @@ public class TestTaxonomyFacetSumValueSource extends FacetTestCase { Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); - IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); config.setIndexFieldName("b", "$b"); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java 
b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java index a14d4adde1d..1fade1c952f 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestConcurrentFacetedIndexing.java @@ -83,7 +83,7 @@ public class TestConcurrentFacetedIndexing extends FacetTestCase { final Directory indexDir = newDirectory(); final Directory taxoDir = newDirectory(); final ConcurrentHashMap<String,String> values = new ConcurrentHashMap<>(); - final IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + final IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(null)); final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE, newTaxoWriterCache(numDocs.get())); final Thread[] indexThreads = new Thread[atLeast(4)]; final FacetsConfig config = new FacetsConfig(); diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java index 0555f65607a..1293de448f3 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/directory/TestDirectoryTaxonomyWriter.java @@ -437,7 +437,7 @@ public class TestDirectoryTaxonomyWriter extends FacetTestCase { @Test public void testHugeLabel() throws Exception { Directory indexDir = newDirectory(), taxoDir = newDirectory(); - IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE, new Cl2oTaxonomyWriterCache(2, 1f, 1)); FacetsConfig config = new FacetsConfig(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java index 8e5e1f2c685..3bc5319b891 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupHeadsCollectorTest.java @@ -68,8 +68,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); DocValuesType valueType = DocValuesType.SORTED; // 0 @@ -221,8 +220,7 @@ public class AllGroupHeadsCollectorTest extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); DocValuesType valueType = DocValuesType.SORTED; Document doc = new Document(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java index c9134f2502c..8dcdd50eb9f 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java
+++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/AllGroupsCollectorTest.java @@ -49,8 +49,7 @@ public class AllGroupsCollectorTest extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java index 9a484521838..a8cf9873cf8 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/DistinctValuesCollectorTest.java @@ -72,8 +72,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase { RandomIndexWriter w = new RandomIndexWriter( random, dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); addField(doc, groupField, "1"); addField(doc, countField, "1"); @@ -409,8 +408,7 @@ public class DistinctValuesCollectorTest extends AbstractGroupingTestCase { RandomIndexWriter w = new RandomIndexWriter( random, dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()) + newIndexWriterConfig(new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()) ); int numDocs = 86 + random.nextInt(1087) * RANDOM_MULTIPLIER; diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java index 3feb0f57159..732d4fb4dd6 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupFacetCollectorTest.java @@ -65,8 +65,7 @@ public class GroupFacetCollectorTest extends AbstractGroupingTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); boolean useDv = true; // 0 @@ -290,8 +289,7 @@ public class GroupFacetCollectorTest extends AbstractGroupingTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); boolean useDv = true; // Cannot assert this since we use NoMergePolicy: @@ -509,10 +507,7 @@ public class GroupFacetCollectorTest extends AbstractGroupingTestCase { RandomIndexWriter writer = new RandomIndexWriter( random, dir, - newIndexWriterConfig( - TEST_VERSION_CURRENT, - new MockAnalyzer(random) - ) + newIndexWriterConfig(new MockAnalyzer(random)) ); Document doc = new Document(); Document docNoGroup = new Document(); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java index 
850e1f9a9e6..66462a185c6 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java @@ -57,8 +57,7 @@ public class GroupingSearchTest extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); boolean canUseIDV = true; List<Document> documents = new ArrayList<>(); // 0 @@ -227,8 +226,7 @@ public class GroupingSearchTest extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); doc.add(newField("group", "foo", StringField.TYPE_NOT_STORED)); w.addDocument(doc); diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java index a987ca172f6..6835f0940f5 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java @@ -69,8 +69,7 @@ public class TestGrouping extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); addGroupField(doc, groupField, "author1"); @@ -550,8 +549,7 @@ public class TestGrouping extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); final List<List<Document>> updateDocs = new ArrayList<>(); @@ -671,8 +669,7 @@ public class TestGrouping extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); Document docNoGroup = new Document(); Field idvGroupField = new SortedDocValuesField("group", new BytesRef()); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java index 96edb8aa5df..4b278cc7732 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java @@ -55,7 +55,7 @@ public class HighlighterPhraseTest extends LuceneTestCase { final String TEXT = "the fox jumped"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); @@ -96,7 +96,7 @@ public class
HighlighterPhraseTest extends LuceneTestCase { final String TEXT = "the fox jumped"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); try { final Document document = new Document(); @@ -164,7 +164,7 @@ public class HighlighterPhraseTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); try { final Document document = new Document(); @@ -206,7 +206,7 @@ public class HighlighterPhraseTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); try { final Document document = new Document(); @@ -245,7 +245,7 @@ public class HighlighterPhraseTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false))); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java index ee6d8b88b7e..3601dffd7ac 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java @@ -1878,8 +1878,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET); dir = newDirectory(); ramDir = newDirectory(); - IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); + IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); for (String text : texts) { addDoc(writer, text); } diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java index a725ce8828c..4d804f170bf 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/TokenSourcesTest.java @@ -92,7 +92,7 @@ public class TokenSourcesTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new 
IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + newIndexWriterConfig(null)); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); @@ -136,7 +136,7 @@ public class TokenSourcesTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + newIndexWriterConfig(null)); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); @@ -181,7 +181,7 @@ public class TokenSourcesTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + newIndexWriterConfig(null)); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); @@ -225,7 +225,7 @@ public class TokenSourcesTest extends LuceneTestCase { final String TEXT = "the fox did not jump"; final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + newIndexWriterConfig(null)); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); @@ -268,7 +268,7 @@ public class TokenSourcesTest extends LuceneTestCase { throws IOException, InvalidTokenOffsetsException { final Directory directory = newDirectory(); final IndexWriter indexWriter = new IndexWriter(directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + newIndexWriterConfig(null)); try { final Document document = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java index 11c9457595b..548fe92fe16 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java @@ -65,7 +65,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -117,7 +117,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." 
is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -169,7 +169,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -221,7 +221,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -282,7 +282,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -392,7 +392,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -445,7 +445,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -488,7 +488,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." 
is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -529,7 +529,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -571,7 +571,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -612,7 +612,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -654,7 +654,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -696,7 +696,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -739,7 +739,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." 
is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -783,7 +783,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java index a0b0346774d..a496c710870 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java @@ -55,7 +55,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testBasics() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -125,7 +125,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { final Analyzer analyzer = new MockAnalyzer(random()); Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -160,7 +160,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { // simple test highlighting last word. public void testHighlightLastWord() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -193,7 +193,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testOneSentence() throws Exception { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." is a token) - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -229,7 +229,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMaxLengthWithMultivalue() throws Exception { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." 
is a token) - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -264,7 +264,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMultipleFields() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -305,7 +305,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMultipleTerms() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -342,7 +342,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMultiplePassages() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -376,7 +376,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testUserFailedToIndexOffsets() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -520,7 +520,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testPassageRanking() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -579,7 +579,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testHighlightAllText() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -615,7 +615,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testSpecificDocIDs() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = 
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -653,7 +653,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testCustomFieldValueSource() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -702,7 +702,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { * there were no hits. */ public void testEmptyHighlights() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -733,7 +733,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { * highlight is returned. */ public void testCustomEmptyHighlights() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -769,7 +769,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { * are no hits and BreakIterator is null. */ public void testEmptyHighlightsWhole() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -805,7 +805,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { * field. 
*/ public void testFieldIsMissing() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -834,7 +834,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testFieldIsJustSpace() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -870,7 +870,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testFieldIsEmptyString() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -906,7 +906,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMultipleDocs() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -956,7 +956,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testMultipleSnippetSizes() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -991,7 +991,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testEncode() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -1029,7 +1029,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { public void testGapSeparator() throws Exception { Directory dir = newDirectory(); // use simpleanalyzer for more natural tokenization (else "test." 
is a token) - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -1072,7 +1072,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { // LUCENE-4906 public void testObjectFormatter() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java index 448ff12b977..26af9b6c406 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java @@ -247,7 +247,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase { /** sets b=0 to disable passage length normalization */ public void testCustomB() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); @@ -285,7 +285,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase { /** sets k1=0 for simple coordinate-level match (# of query terms present) */ public void testCustomK1() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true)); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java index e3742da1ea0..6aaf517f2b8 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/FastVectorHighlighterTest.java @@ -59,7 +59,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testSimpleHighlightTest() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); type.setStoreTermVectorOffsets(true); @@ -89,7 +89,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testPhraseHighlightLongTextTest() 
throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); type.setStoreTermVectorOffsets(true); @@ -135,7 +135,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { // see LUCENE-4899 public void testPhraseHighlightTest() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_STORED); type.setStoreTermVectorOffsets(true); @@ -262,7 +262,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testBoostedPhraseHighlightTest() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer( random() ) ) ); + IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer( random() ) ) ); Document doc = new Document(); FieldType type = new FieldType( TextField.TYPE_STORED ); type.setStoreTermVectorOffsets( true ); @@ -307,7 +307,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testCommonTermsQueryHighlight() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET))); FieldType type = new FieldType(TextField.TYPE_STORED); type.setStoreTermVectorOffsets(true); type.setStoreTermVectorPositions(true); @@ -461,7 +461,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testMultiValuedSortByScore() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer( random() ) ) ); + IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer( random() ) ) ); Document doc = new Document(); FieldType type = new FieldType( TextField.TYPE_STORED ); type.setStoreTermVectorOffsets( true ); @@ -509,7 +509,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { public void testBooleanPhraseWithSynonym() throws IOException { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); FieldType type = new FieldType(TextField.TYPE_NOT_STORED); type.setStoreTermVectorOffsets(true); @@ -613,7 +613,7 @@ public class FastVectorHighlighterTest extends LuceneTestCase { }; Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig( TEST_VERSION_CURRENT, analyzer ) ); + IndexWriter writer = new IndexWriter( dir, newIndexWriterConfig(analyzer)); writer.addDocument( doc ); FastVectorHighlighter highlighter = new FastVectorHighlighter(); diff --git 
a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java index c8881604ca4..eb814c5e770 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java @@ -237,8 +237,7 @@ public class SimpleFragmentsBuilderTest extends AbstractTestCase { RandomIndexWriter writer = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); FieldType customType = new FieldType(TextField.TYPE_STORED); customType.setStoreTermVectors(true); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index acf07b6b564..752bed6ccfb 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -388,8 +388,7 @@ public class TestBlockJoin extends LuceneTestCase { final RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); // Cannot assert this since we use NoMergePolicy: w.setDoRandomForceMergeAssert(false); @@ -1159,7 +1158,7 @@ public class TestBlockJoin extends LuceneTestCase { public void testAdvanceSingleParentNoChild() throws Exception { Directory dir = newDirectory(); - RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy())); + RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(new LogDocMergePolicy())); Document parentDoc = new Document(); parentDoc.add(newStringField("parent", "1", Field.Store.NO)); parentDoc.add(newStringField("isparent", "yes", Field.Store.NO)); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSorting.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSorting.java index 72a9247ef7d..68ce315d467 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSorting.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinSorting.java @@ -53,8 +53,8 @@ public class TestBlockJoinSorting extends LuceneTestCase { @Test public void testNestedSorting() throws Exception { final Directory dir = newDirectory(); - final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())) + .setMergePolicy(NoMergePolicy.INSTANCE)); List<Document> docs = new ArrayList<>(); Document document = new Document(); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java index cc0954d6f62..4f619e2ebb6 100644 ---
a/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestJoinUtil.java @@ -85,8 +85,7 @@ public class TestJoinUtil extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); @@ -186,8 +185,7 @@ public class TestJoinUtil extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); @@ -235,8 +233,7 @@ public class TestJoinUtil extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); @@ -325,8 +322,7 @@ public class TestJoinUtil extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, - new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); // 0 Document doc = new Document(); @@ -443,7 +439,7 @@ public class TestJoinUtil extends LuceneTestCase { RandomIndexWriter w = new RandomIndexWriter( random(), dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()) + newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy()) ); final boolean scoreDocsInOrder = TestJoinUtil.random().nextBoolean(); IndexIterationContext context = createContext(numberOfDocumentsToIndex, w, multipleValuesPerDocument, scoreDocsInOrder); diff --git a/lucene/misc/src/test/org/apache/lucene/document/TestLazyDocument.java b/lucene/misc/src/test/org/apache/lucene/document/TestLazyDocument.java index 9971c3bee32..4f51741265e 100644 --- a/lucene/misc/src/test/org/apache/lucene/document/TestLazyDocument.java +++ b/lucene/misc/src/test/org/apache/lucene/document/TestLazyDocument.java @@ -56,7 +56,7 @@ public class TestLazyDocument extends LuceneTestCase { Analyzer analyzer = new MockAnalyzer(random()); IndexWriter writer = new IndexWriter - (dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + (dir, newIndexWriterConfig(analyzer)); try { for (int docid = 0; docid < NUM_DOCS; docid++) { Document d = new Document(); diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java b/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java index 190d908b969..7e6401bc794 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java +++ b/lucene/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java @@ -33,7 +33,7 @@ public class TestMultiPassIndexSplitter extends LuceneTestCase { public void setUp() throws Exception { super.setUp(); dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE)); Document doc; for (int i = 0; i < NUM_DOCS; i++) { doc = new Document(); diff --git a/lucene/misc/src/test/org/apache/lucene/index/TestPKIndexSplitter.java b/lucene/misc/src/test/org/apache/lucene/index/TestPKIndexSplitter.java index 67c296bdb9c..dec88072e0d 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/TestPKIndexSplitter.java +++ b/lucene/misc/src/test/org/apache/lucene/index/TestPKIndexSplitter.java @@ -36,8 +36,7 @@ public class TestPKIndexSplitter extends LuceneTestCase { public void testSplit() throws Exception { NumberFormat format = new DecimalFormat("000000000", DecimalFormatSymbols.getInstance(Locale.ROOT)); Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) .setOpenMode(OpenMode.CREATE).setMergePolicy(NoMergePolicy.INSTANCE)); for (int x = 0; x < 11; x++) { Document doc = createDocument(x, "1", 3, format); @@ -56,8 +55,7 @@ public class TestPKIndexSplitter extends LuceneTestCase { checkSplitting(dir, midTerm, 11, 9); // delete some documents - w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) + w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) .setOpenMode(OpenMode.APPEND).setMergePolicy(NoMergePolicy.INSTANCE)); w.deleteDocuments(midTerm); w.deleteDocuments(new Term("id", format.format(2))); @@ -72,8 +70,8 @@ public class TestPKIndexSplitter extends LuceneTestCase { Directory dir1 = newDirectory(); Directory dir2 = newDirectory(); PKIndexSplitter splitter = new PKIndexSplitter(dir, dir1, dir2, splitTerm, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())), - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + newIndexWriterConfig(new MockAnalyzer(random())), + newIndexWriterConfig(new MockAnalyzer(random()))); splitter.split(); IndexReader ir1 = DirectoryReader.open(dir1); diff --git a/lucene/misc/src/test/org/apache/lucene/index/sorter/IndexSortingTest.java b/lucene/misc/src/test/org/apache/lucene/index/sorter/IndexSortingTest.java index 8b8ec872957..6b6995da3c5 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/sorter/IndexSortingTest.java +++ b/lucene/misc/src/test/org/apache/lucene/index/sorter/IndexSortingTest.java @@ -67,7 +67,7 @@ public class IndexSortingTest extends SorterTestBase { } Directory target = newDirectory(); - IndexWriter writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter writer = new IndexWriter(target, newIndexWriterConfig(null)); reader = SortingAtomicReader.wrap(reader, sorter); writer.addIndexes(reader); writer.shutdown(); diff --git a/lucene/misc/src/test/org/apache/lucene/index/sorter/SorterTestBase.java b/lucene/misc/src/test/org/apache/lucene/index/sorter/SorterTestBase.java index 484f09ec186..3142e2f36df 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/sorter/SorterTestBase.java +++ b/lucene/misc/src/test/org/apache/lucene/index/sorter/SorterTestBase.java @@ -207,7 +207,7 @@ public abstract class SorterTestBase extends LuceneTestCase { } PositionsTokenStream 
positions = new PositionsTokenStream(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random)); conf.setMaxBufferedDocs(4); // create some segments conf.setSimilarity(new NormsSimilarity(conf.getSimilarity())); // for testing norms field RandomIndexWriter writer = new RandomIndexWriter(random, dir, conf); diff --git a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestBlockJoinSorter.java b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestBlockJoinSorter.java index 37ccd59b2cd..da138073c82 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestBlockJoinSorter.java +++ b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestBlockJoinSorter.java @@ -64,7 +64,7 @@ public class TestBlockJoinSorter extends LuceneTestCase { public void test() throws IOException { final int numParents = atLeast(200); - IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random())); cfg.setMergePolicy(newLogMergePolicy()); final RandomIndexWriter writer = new RandomIndexWriter(random(), newDirectory(), cfg); final Document parentDoc = new Document(); diff --git a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestEarlyTermination.java b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestEarlyTermination.java index 3af59286ab4..e165512eba2 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestEarlyTermination.java +++ b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestEarlyTermination.java @@ -83,7 +83,7 @@ public class TestEarlyTermination extends LuceneTestCase { } terms = new ArrayList<>(randomTerms); final long seed = random().nextLong(); - final IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))); + final IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(new Random(seed))); iwc.setMergeScheduler(new SerialMergeScheduler()); // for reproducible tests iwc.setMergePolicy(TestSortingMergePolicy.newSortingMergePolicy(sort)); iw = new RandomIndexWriter(new Random(seed), dir, iwc); diff --git a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestSortingMergePolicy.java b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestSortingMergePolicy.java index 65d47641bd1..27f3e0d8d2b 100644 --- a/lucene/misc/src/test/org/apache/lucene/index/sorter/TestSortingMergePolicy.java +++ b/lucene/misc/src/test/org/apache/lucene/index/sorter/TestSortingMergePolicy.java @@ -103,8 +103,8 @@ public class TestSortingMergePolicy extends LuceneTestCase { } terms = new ArrayList<>(randomTerms); final long seed = random().nextLong(); - final IndexWriterConfig iwc1 = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))); - final IndexWriterConfig iwc2 = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(new Random(seed))); + final IndexWriterConfig iwc1 = newIndexWriterConfig(new MockAnalyzer(new Random(seed))); + final IndexWriterConfig iwc2 = newIndexWriterConfig(new MockAnalyzer(new Random(seed))); iwc2.setMergePolicy(newSortingMergePolicy(sort)); final RandomIndexWriter iw1 = new RandomIndexWriter(new Random(seed), dir1, iwc1); final RandomIndexWriter iw2 = new RandomIndexWriter(new Random(seed), dir2, iwc2); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java 
b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java index 9f40207167f..f05cf4c4837 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestDocTermOrds.java @@ -66,7 +66,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testSimple() throws Exception { Directory dir = newDirectory(); - final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + final RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); Field field = newTextField("field", "", Field.Store.NO); doc.add(field); @@ -124,7 +124,7 @@ public class TestDocTermOrds extends LuceneTestCase { final int NUM_DOCS = atLeast(100); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); // Sometimes swap in codec that impls ord(): if (random().nextInt(10) == 7) { @@ -222,7 +222,7 @@ public class TestDocTermOrds extends LuceneTestCase { final int NUM_DOCS = atLeast(100); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); // Sometimes swap in codec that impls ord(): if (random().nextInt(10) == 7) { @@ -395,7 +395,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testBackToTheFuture() throws Exception { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(newStringField("foo", "bar", Field.Store.NO)); @@ -427,7 +427,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testNumericEncoded32() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new IntField("foo", 5, Field.Store.NO)); @@ -468,7 +468,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testNumericEncoded64() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new LongField("foo", 5, Field.Store.NO)); @@ -510,7 +510,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testSortedTermsEnum() throws IOException { Directory directory = newDirectory(); Analyzer analyzer = new MockAnalyzer(random()); - IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer); iwconfig.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig); @@ -593,7 +593,7 @@ public class TestDocTermOrds extends LuceneTestCase { public void testActuallySingleValued() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig iwconfig = 
newIndexWriterConfig(null); iwconfig.setMergePolicy(newLogMergePolicy()); IndexWriter iw = new IndexWriter(dir, iwconfig); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java index b8631734cbe..e8f7597574a 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCache.java @@ -76,7 +76,7 @@ public class TestFieldCache extends LuceneTestCase { NUM_DOCS = atLeast(500); NUM_ORDS = atLeast(2); directory = newDirectory(); - RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer= new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); long theLong = Long.MAX_VALUE; double theDouble = Double.MAX_VALUE; int theInt = Integer.MAX_VALUE; @@ -289,7 +289,7 @@ public class TestFieldCache extends LuceneTestCase { public void testEmptyIndex() throws Exception { Directory dir = newDirectory(); - IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(500)); + IndexWriter writer= new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(500)); writer.shutdown(); IndexReader r = DirectoryReader.open(dir); AtomicReader reader = SlowCompositeReaderWrapper.wrap(r); @@ -424,7 +424,7 @@ public class TestFieldCache extends LuceneTestCase { public void testDocValuesIntegration() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig iwc = newIndexWriterConfig(null); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); doc.add(new BinaryDocValuesField("binary", new BytesRef("binary value"))); @@ -670,7 +670,7 @@ public class TestFieldCache extends LuceneTestCase { // Make sure that the use of GrowableWriter doesn't prevent from using the full long range public void testLongFieldCache() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random())); cfg.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); Document doc = new Document(); @@ -716,7 +716,7 @@ public class TestFieldCache extends LuceneTestCase { // Make sure that the use of GrowableWriter doesn't prevent from using the full int range public void testIntFieldCache() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random())); cfg.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg); Document doc = new Document(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java index ea6a359bc91..225196ee54d 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheReopen.java @@ 
-39,7 +39,7 @@ public class TestFieldCacheReopen extends LuceneTestCase { Directory dir = newDirectory(); IndexWriter writer = new IndexWriter( dir, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())). + newIndexWriterConfig(new MockAnalyzer(random())). setMergePolicy(newLogMergePolicy(10)) ); Document doc = new Document(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java index 1dc461afdf4..6672c612d62 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSanityChecker.java @@ -50,8 +50,8 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase { dirA = newDirectory(); dirB = newDirectory(); - IndexWriter wA = new IndexWriter(dirA, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); - IndexWriter wB = new IndexWriter(dirB, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter wA = new IndexWriter(dirA, newIndexWriterConfig(new MockAnalyzer(random()))); + IndexWriter wB = new IndexWriter(dirB, newIndexWriterConfig(new MockAnalyzer(random()))); long theLong = Long.MAX_VALUE; double theDouble = Double.MAX_VALUE; diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java index 9ba1cab748c..02a9639e4c4 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java @@ -992,8 +992,7 @@ public class TestFieldCacheSort extends LuceneTestCase { public void testEmptyStringVsNullStringSort() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); doc.add(newStringField("f", "", Field.Store.NO)); doc.add(newStringField("t", "1", Field.Store.NO)); @@ -1020,8 +1019,7 @@ public class TestFieldCacheSort extends LuceneTestCase { /** test that we throw exception on multi-valued field, creates corrupt reader, use SORTED_SET instead */ public void testMultiValuedField() throws IOException { Directory indexStore = newDirectory(); - IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(indexStore, newIndexWriterConfig(new MockAnalyzer(random()))); for(int i=0; i<5; i++) { Document doc = new Document(); doc.add(new StringField("string", "a"+i, Field.Store.NO)); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java index aee05369367..378116a23dd 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheVsDocValues.java @@ -134,7 +134,7 @@ public class TestFieldCacheVsDocValues extends LuceneTestCase { } else { numDocs = TestUtil.nextInt(random(), 100, 200); } - IndexWriter w = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter w = new IndexWriter(d, 
newIndexWriterConfig(analyzer)); List docBytes = new ArrayList<>(); long totalBytes = 0; for(int docID=0;docID docBytes = new ArrayList<>(); long totalBytes = 0; for(int docID=0;docID numbers = new ArrayList<>(); final List binary = new ArrayList<>(); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java index ae649f1eee7..98c37373b2f 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java @@ -59,7 +59,7 @@ public class TestNumericTerms32 extends LuceneTestCase { distance = (1 << 30) / noDocs; directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) .setMergePolicy(newLogMergePolicy())); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java index ba3a27d85ca..6368e2c435f 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java @@ -59,7 +59,7 @@ public class TestNumericTerms64 extends LuceneTestCase { distance = (1L << 60) / noDocs; directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 100, 1000)) .setMergePolicy(newLogMergePolicy())); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java index 9d0ed3bd616..9952ecd8589 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestUninvertingReader.java @@ -39,7 +39,7 @@ public class TestUninvertingReader extends LuceneTestCase { public void testSortedSetInteger() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new IntField("foo", 5, Field.Store.NO)); @@ -80,7 +80,7 @@ public class TestUninvertingReader extends LuceneTestCase { public void testSortedSetFloat() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new IntField("foo", Float.floatToRawIntBits(5f), Field.Store.NO)); @@ -122,7 +122,7 @@ public class TestUninvertingReader extends LuceneTestCase { public void testSortedSetLong() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new LongField("foo", 5, Field.Store.NO)); @@ -163,7 +163,7 @@ public class TestUninvertingReader extends LuceneTestCase { public void 
testSortedSetDouble() throws IOException { Directory dir = newDirectory(); - IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new LongField("foo", Double.doubleToRawLongBits(5d), Field.Store.NO)); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java b/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java index 0a8d50db650..1f573ede808 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/FunctionTestSetup.java @@ -94,7 +94,7 @@ public abstract class FunctionTestSetup extends LuceneTestCase { // prepare a small index with just a few documents. dir = newDirectory(); anlzr = new MockAnalyzer(random()); - IndexWriterConfig iwc = newIndexWriterConfig( TEST_VERSION_CURRENT, anlzr).setMergePolicy(newLogMergePolicy()); + IndexWriterConfig iwc = newIndexWriterConfig(anlzr).setMergePolicy(newLogMergePolicy()); if (doMultiSegment) { iwc.setMaxBufferedDocs(TestUtil.nextInt(random(), 2, 7)); } diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestBoostedQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestBoostedQuery.java index f0e12feca89..b1cbb149c09 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestBoostedQuery.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestBoostedQuery.java @@ -51,7 +51,7 @@ public class TestBoostedQuery extends LuceneTestCase { @BeforeClass public static void beforeClass() throws Exception { dir = newDirectory(); - IndexWriterConfig iwConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwConfig = newIndexWriterConfig(new MockAnalyzer(random())); iwConfig.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConfig); Document document = new Document(); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java index 2411da28646..6b98c7272f7 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestDocValuesFieldSources.java @@ -46,7 +46,7 @@ public class TestDocValuesFieldSources extends LuceneTestCase { public void test(DocValuesType type) throws IOException { Directory d = newDirectory(); - IndexWriterConfig iwConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwConfig = newIndexWriterConfig(new MockAnalyzer(random())); final int nDocs = atLeast(50); final Field id = new NumericDocValuesField("id", 0); final Field f; diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java index e5cbc519702..f9ae42257e7 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFunctionQuerySort.java @@ -43,7 +43,7 @@ public class TestFunctionQuerySort extends LuceneTestCase { public void testSearchAfterWhenSortingByFunctionValues() throws IOException { 
Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig iwc = newIndexWriterConfig(null); iwc.setMergePolicy(newLogMergePolicy()); // depends on docid order RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java index 716e974b580..8f201add63e 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestLongNormValueSource.java @@ -51,7 +51,7 @@ public class TestLongNormValueSource extends LuceneTestCase { @BeforeClass public static void beforeClass() throws Exception { dir = newDirectory(); - IndexWriterConfig iwConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwConfig = newIndexWriterConfig(new MockAnalyzer(random())); iwConfig.setMergePolicy(newLogMergePolicy()); iwConfig.setSimilarity(sim); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConfig); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java index d45594c44da..791621cca22 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestSortedSetFieldSource.java @@ -35,7 +35,7 @@ import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; public class TestSortedSetFieldSource extends LuceneTestCase { public void testSimple() throws Exception { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new SortedSetDocValuesField("value", new BytesRef("baz"))); doc.add(newStringField("id", "2", Field.Store.YES)); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java index 90927d3f0d3..719b2ae978a 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestValueSources.java @@ -97,7 +97,7 @@ public class TestValueSources extends LuceneTestCase { @BeforeClass public static void beforeClass() throws Exception { dir = newDirectory(); - IndexWriterConfig iwConfig = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwConfig = newIndexWriterConfig(new MockAnalyzer(random())); iwConfig.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConfig); Document document = new Document(); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java index 9ee09873e72..80f4e3d6edc 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/classic/TestMultiFieldQueryParser.java @@ -286,7 +286,7 @@ public class 
TestMultiFieldQueryParser extends LuceneTestCase { public void testStopWordSearching() throws Exception { Analyzer analyzer = new MockAnalyzer(random()); Directory ramDir = newDirectory(); - IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(analyzer)); Document doc = new Document(); doc.add(newTextField("body", "blah the footest blah", Field.Store.NO)); iw.addDocument(doc); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java index 35351019ef7..b82748d0702 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java @@ -171,7 +171,7 @@ public class TestComplexPhraseQuery extends LuceneTestCase { analyzer = new MockAnalyzer(random()); rd = newDirectory(); - IndexWriter w = new IndexWriter(rd, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter w = new IndexWriter(rd, newIndexWriterConfig(analyzer)); for (int i = 0; i < docsContent.length; i++) { Document doc = new Document(); doc.add(newTextField("name", docsContent[i].name, Field.Store.YES)); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java index 73c1a39d23b..e57c550f37a 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestMultiFieldQPHelper.java @@ -320,7 +320,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase { public void testStopWordSearching() throws Exception { Analyzer analyzer = new MockAnalyzer(random()); Directory ramDir = newDirectory(); - IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter iw = new IndexWriter(ramDir, newIndexWriterConfig(analyzer)); Document doc = new Document(); doc.add(newTextField("body", "blah the footest blah", Field.Store.NO)); iw.addDocument(doc); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java index 6f88c28e3cb..11bc9440a34 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestNumericQueryParser.java @@ -189,7 +189,7 @@ public class TestNumericQueryParser extends LuceneTestCase { directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, - newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())) + newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000)) .setMergePolicy(newLogMergePolicy())); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java index 5068210f33a..93dabfe584c 100644 --- 
a/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/flexible/standard/TestQPHelper.java @@ -1310,7 +1310,7 @@ public class TestQPHelper extends LuceneTestCase { public void testMultiPhraseQuery() throws Exception { Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new CannedAnalyzer())); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new CannedAnalyzer())); Document doc = new Document(); doc.add(newTextField("field", "", Field.Store.NO)); w.addDocument(doc); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java index f76c9e1a126..874f4881e24 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/util/QueryParserTestBase.java @@ -1089,7 +1089,7 @@ public abstract class QueryParserTestBase extends LuceneTestCase { public void testPositionIncrements() throws Exception { Directory dir = newDirectory(); Analyzer a = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, a)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(a)); Document doc = new Document(); doc.add(newTextField("field", "the wizard of ozzy", Field.Store.NO)); w.addDocument(doc); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestParser.java index eb761c3a2ce..22f7cc7f714 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestParser.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestParser.java @@ -66,7 +66,7 @@ public class TestParser extends LuceneTestCase { BufferedReader d = new BufferedReader(new InputStreamReader( TestParser.class.getResourceAsStream("reuters21578.txt"), StandardCharsets.US_ASCII)); dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer)); String line = d.readLine(); while (line != null) { int endOfDate = line.indexOf('\t'); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java index 09f2cb1cf41..78839ed41bd 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java @@ -145,7 +145,7 @@ public class TestQueryTemplateManager extends LuceneTestCase { analyzer = new MockAnalyzer(random()); //Create an index dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); + IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(analyzer)); for (String docFieldValue : docFieldValues) { w.addDocument(getDocumentFromString(docFieldValue)); } diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeFilterBuilder.java 
b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeFilterBuilder.java index af94ede860d..81baab4f7bc 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeFilterBuilder.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/builders/TestNumericRangeFilterBuilder.java @@ -63,7 +63,7 @@ public class TestNumericRangeFilterBuilder extends LuceneTestCase { Document doc = getDocumentFromString(xml); Filter filter = filterBuilder.getFilter(doc.getDocumentElement()); Directory ramDir = newDirectory(); - IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null)); + IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(null)); writer.commit(); try { AtomicReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(ramDir)); diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java index c23732e591f..c75ca1f31cd 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexAndTaxonomyReplicationClientTest.java @@ -198,7 +198,7 @@ public class IndexAndTaxonomyReplicationClientTest extends ReplicatorTestCase { handler = new IndexAndTaxonomyReplicationHandler(handlerIndexDir, handlerTaxoDir, callback); client = new ReplicationClient(replicator, handler, sourceDirFactory); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig conf = newIndexWriterConfig(null); conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy())); publishIndexWriter = new IndexWriter(publishIndexDir, conf); publishTaxoWriter = new SnapshotDirectoryTaxonomyWriter(publishTaxoDir); diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexReplicationClientTest.java b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexReplicationClientTest.java index 1e186d75c58..9c495b6234e 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/IndexReplicationClientTest.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/IndexReplicationClientTest.java @@ -142,7 +142,7 @@ public class IndexReplicationClientTest extends ReplicatorTestCase { handler = new IndexReplicationHandler(handlerDir, callback); client = new ReplicationClient(replicator, handler, sourceDirFactory); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig conf = newIndexWriterConfig(null); conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy())); publishWriter = new IndexWriter(publishDir, conf); } diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/LocalReplicatorTest.java b/lucene/replicator/src/test/org/apache/lucene/replicator/LocalReplicatorTest.java index b60c2ec68cc..153ae22af2f 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/LocalReplicatorTest.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/LocalReplicatorTest.java @@ -50,7 +50,7 @@ public class LocalReplicatorTest extends ReplicatorTestCase { public void setUp() throws Exception { super.setUp(); sourceDir = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig conf = 
newIndexWriterConfig(null); conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy())); sourceWriter = new IndexWriter(sourceDir, conf); replicator = new LocalReplicator(); diff --git a/lucene/replicator/src/test/org/apache/lucene/replicator/http/HttpReplicatorTest.java b/lucene/replicator/src/test/org/apache/lucene/replicator/http/HttpReplicatorTest.java index a65ba03ceff..d0b1a03713e 100644 --- a/lucene/replicator/src/test/org/apache/lucene/replicator/http/HttpReplicatorTest.java +++ b/lucene/replicator/src/test/org/apache/lucene/replicator/http/HttpReplicatorTest.java @@ -86,7 +86,7 @@ public class HttpReplicatorTest extends ReplicatorTestCase { serverReplicator = new LocalReplicator(); startServer(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null); + IndexWriterConfig conf = newIndexWriterConfig(null); conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy())); writer = new IndexWriter(serverIndexDir, conf); reader = DirectoryReader.open(writer, false); diff --git a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java index 9ff8acef959..09fff7bce08 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java +++ b/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java @@ -66,7 +66,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testBasic() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -189,7 +189,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testRandom() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); int minItemsInBlock = TestUtil.nextInt(random(), 2, 50); int maxItemsInBlock = 2*(minItemsInBlock-1) + random().nextInt(50); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat(minItemsInBlock, maxItemsInBlock))); @@ -358,7 +358,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOneDocPerIDOneSegment() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -379,7 +379,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOneDocPerIDTwoSegments() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); iwc.setMergePolicy(new 
TieredMergePolicy()); MergeScheduler ms = iwc.getMergeScheduler(); @@ -415,7 +415,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOneDocPerIDWithUpdates() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -432,7 +432,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOneDocPerIDWithDeletes() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -460,7 +460,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { return new TokenStreamComponents(tokenizer, filt); } }; - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, a); + IndexWriterConfig iwc = newIndexWriterConfig(a); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -479,7 +479,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMissingPositions() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -498,7 +498,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testInvalidPayload() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -517,7 +517,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOneDocPerIDWithDeletesAcrossSegments() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -537,7 +537,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { // non-deleted documents on flush, CheckIndex will see this as corruption: public void testCannotIndexTermVectors() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new 
MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -565,7 +565,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testMoreThanOnceInSingleDoc() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -584,7 +584,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { public void testInvalidVersions() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); Document doc = new Document(); @@ -615,7 +615,7 @@ public class TestIDVersionPostingsFormat extends LuceneTestCase { // Simulates optimistic concurrency in a distributed indexing app and confirms the latest version always wins: public void testGlobalVersions() throws Exception { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setCodec(TestUtil.alwaysPostingsFormat(new IDVersionPostingsFormat())); final RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java index 317003fd90c..ca3ae5e9c2d 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java @@ -44,7 +44,7 @@ public class DuplicateFilterTest extends LuceneTestCase { public void setUp() throws Exception { super.setUp(); directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); //Add series of docs with filterable fields : url, text and dates flags addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101"); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java index d656a58edf4..41a4d95ee65 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/FuzzyLikeThisQueryTest.java @@ -47,7 +47,7 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase { analyzer = new MockAnalyzer(random()); directory = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), directory, 
newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy())); //Add series of docs with misspelt names addDoc(writer, "jonathon smythe", "1"); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java index 8b9a16ec022..22610f8e3cd 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery2.java @@ -90,7 +90,7 @@ public class TestSlowFuzzyQuery2 extends LuceneTestCase { int terms = (int) Math.pow(2, bits); Directory dir = newDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy())); + RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.KEYWORD, false)).setMergePolicy(newLogMergePolicy())); Document doc = new Document(); Field field = newTextField("field", "", Field.Store.NO); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java index 39d277917dd..4a210110a07 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java @@ -53,8 +53,7 @@ public class TestSpanRegexQuery extends LuceneTestCase { public void testSpanRegex() throws Exception { Directory directory = newDirectory(); - IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig( - TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(new MockAnalyzer(random()))); Document doc = new Document(); // doc.add(newField("field", "the quick brown fox jumps over the lazy dog", // Field.Store.NO, Field.Index.ANALYZED)); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java index 364487b09ff..cdcdf6f8f18 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java @@ -474,7 +474,7 @@ public class TestTermAutomatonQuery extends LuceneTestCase { } }; - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig iwc = newIndexWriterConfig(analyzer); RandomIndexWriter w = new RandomIndexWriter(random(), dir, iwc); for(int i=0;i, Map> res = generateIndexDocuments(atLeast(1000), true, false); @@ -169,7 +169,7 @@ public class DocumentDictionaryTest extends LuceneTestCase { @Test public void testWithoutPayload() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map.Entry, Map> res = 
generateIndexDocuments(atLeast(1000), false, false); @@ -205,7 +205,7 @@ public class DocumentDictionaryTest extends LuceneTestCase { @Test public void testWithContexts() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map.Entry, Map> res = generateIndexDocuments(atLeast(1000), true, true); @@ -246,7 +246,7 @@ public class DocumentDictionaryTest extends LuceneTestCase { @Test public void testWithDeletions() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map.Entry, Map> res = generateIndexDocuments(atLeast(1000), false, false); diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/DocumentValueSourceDictionaryTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/DocumentValueSourceDictionaryTest.java index 2bfc4b1e166..78a3cc69398 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/DocumentValueSourceDictionaryTest.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/DocumentValueSourceDictionaryTest.java @@ -84,7 +84,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { @Test public void testEmptyReader() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); // Make sure the index is created? 
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); @@ -105,7 +105,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { @Test public void testBasic() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map docs = generateIndexDocuments(atLeast(100)); @@ -137,7 +137,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { @Test public void testWithContext() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map docs = generateIndexDocuments(atLeast(100)); @@ -174,7 +174,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { @Test public void testWithoutPayload() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map docs = generateIndexDocuments(atLeast(100)); @@ -206,7 +206,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { @Test public void testWithDeletions() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map docs = generateIndexDocuments(atLeast(100)); @@ -260,7 +260,7 @@ public class DocumentValueSourceDictionaryTest extends LuceneTestCase { public void testWithValueSource() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())); + IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergePolicy(newLogMergePolicy()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc); Map docs = generateIndexDocuments(atLeast(100)); diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/TestHighFrequencyDictionary.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/TestHighFrequencyDictionary.java index 91f57142132..036a0ce5ee6 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/TestHighFrequencyDictionary.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/TestHighFrequencyDictionary.java @@ -30,7 +30,7 @@ import org.apache.lucene.util.LuceneTestCase; public class TestHighFrequencyDictionary extends LuceneTestCase { public void testEmpty() throws Exception { Directory dir = newDirectory(); - IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))); + IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))); writer.commit(); writer.shutdown(); IndexReader ir = 
DirectoryReader.open(dir); diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java index 79c18a2f36e..34006cf5011 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java @@ -348,7 +348,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); conf.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); @@ -375,7 +375,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); conf.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); @@ -413,7 +413,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); conf.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); @@ -440,7 +440,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); conf.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); @@ -467,7 +467,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); String longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm"; @@ -503,7 +503,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); - IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); conf.setMergePolicy(newLogMergePolicy()); RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf); Document doc = new Document(); @@ -543,7 +543,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes Analyzer analyzer 
= new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
     String longTerm = "longtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongtermlongterm";
@@ -580,7 +580,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -610,7 +610,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -646,7 +646,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -689,7 +689,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testSortedMergeAwayAllValues() throws IOException {
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -726,7 +726,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -749,7 +749,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -777,7 +777,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testSortedTermsEnum() throws IOException {
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -849,7 +849,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -878,7 +878,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -907,7 +907,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -933,7 +933,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -958,7 +958,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -983,7 +983,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     Analyzer analyzer = new MockAnalyzer(random());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -1010,7 +1010,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testDocValuesSimple() throws IOException {
     Directory dir = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setMergePolicy(newLogMergePolicy());
     IndexWriter writer = new IndexWriter(dir, conf);
     for (int i = 0; i < 5; i++) {
@@ -1050,7 +1050,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testRandomSortedBytes() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
     if (!defaultCodecSupportsDocsWithField()) {
       // if the codec doesnt support missing, we expect missing to be mapped to byte[]
       // by the impersonator, but we have to give it a chance to merge them to this
@@ -1148,7 +1148,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   private void doTestNumericsVsStoredFields(LongProducer longs) throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Document doc = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
@@ -1203,7 +1203,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   private void doTestSortedNumericsVsStoredFields(LongProducer counts, LongProducer values) throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     // index some docs
@@ -1301,7 +1301,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   private void doTestBinaryVsStoredFields(int minLength, int maxLength) throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Document doc = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
@@ -1371,7 +1371,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   private void doTestSortedVsStoredFields(int minLength, int maxLength) throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Document doc = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
@@ -1503,7 +1503,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1605,7 +1605,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1654,7 +1654,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1686,7 +1686,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1720,7 +1720,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1753,7 +1753,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1787,7 +1787,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -1816,7 +1816,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
@@ -2085,7 +2085,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testTwoNumbersOneMissing() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2114,7 +2114,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testTwoNumbersOneMissingWithMerging() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2144,7 +2144,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testThreeNumbersOneMissingWithMerging() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2180,7 +2180,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testTwoBytesOneMissing() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2211,7 +2211,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testTwoBytesOneMissingWithMerging() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2243,7 +2243,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   public void testThreeBytesOneMissingWithMerging() throws IOException {
     assumeTrue("Codec does not support getDocsWithField", defaultCodecSupportsDocsWithField());
     Directory directory = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig conf = newIndexWriterConfig(null);
     conf.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), directory, conf);
     Document doc = new Document();
@@ -2282,7 +2282,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
   /** Tests dv against stored fields with threads (binary/numeric/sorted, no missing) */
   public void testThreads() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Document doc = new Document();
     Field idField = new StringField("id", "", Field.Store.NO);
@@ -2374,7 +2374,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_SET", defaultCodecSupportsSortedSet());
     assumeTrue("Codec does not support SORTED_NUMERIC", defaultCodecSupportsSortedNumeric());
     Directory dir = newDirectory();
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Field idField = new StringField("id", "", Field.Store.NO);
     Field storedBinField = new StoredField("storedBin", new byte[0]);
@@ -2716,7 +2716,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes
     assumeTrue("Codec does not support SORTED_NUMERIC", defaultCodecSupportsSortedNumeric());
     Directory directory = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random());
-    IndexWriterConfig iwconfig = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwconfig = newIndexWriterConfig(analyzer);
     iwconfig.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, iwconfig);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
index a4a1181dbe1..3bfff7a89ee 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseIndexFileFormatTestCase.java
@@ -209,7 +209,7 @@ abstract class BaseIndexFileFormatTestCase extends LuceneTestCase {
       Codec.setDefault(new RandomCodec(random(), avoidCodecs));
     }
     Directory dir = newDirectory();
-    IndexWriterConfig cfg = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
     IndexWriter w = new IndexWriter(dir, cfg);
     // we need to index enough documents so that constant overhead doesn't dominate
     final int numDocs = atLeast(10000);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseMergePolicyTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseMergePolicyTestCase.java
index a94fad2a330..3437a04dcec 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseMergePolicyTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseMergePolicyTestCase.java
@@ -46,7 +46,7 @@ public abstract class BaseMergePolicyTestCase extends LuceneTestCase {
         super.merge(writer, trigger, newMergesFound);
       }
     };
-    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(mergeScheduler).setMergePolicy(mergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergeScheduler(mergeScheduler).setMergePolicy(mergePolicy()));
     writer.getConfig().getMergePolicy().setNoCFSRatio(random().nextBoolean() ? 0 : 1);
     final int numSegments = TestUtil.nextInt(random(), 2, 20);
     for (int i = 0; i < numSegments; ++i) {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
index 464ab70d0a8..546d43848ed 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseNormsFormatTestCase.java
@@ -148,7 +148,7 @@ public abstract class BaseNormsFormatTestCase extends BaseIndexFileFormatTestCas
     Directory dir = newDirectory();
     Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.KEYWORD, false);
-    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setSimilarity(new CannedNormSimilarity(norms));
     RandomIndexWriter writer = new RandomIndexWriter(random(), dir, conf);
     Document doc = new Document();
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
index 71b2b0b3898..fbad8896176 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BasePostingsFormatTestCase.java
@@ -1411,7 +1411,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
   public void testJustEmptyField() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig iwc = newIndexWriterConfig(null);
     iwc.setCodec(getCodec());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
     Document doc = new Document();
@@ -1436,7 +1436,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
   public void testEmptyFieldAndEmptyTerm() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig iwc = newIndexWriterConfig(null);
     iwc.setCodec(getCodec());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
     Document doc = new Document();
@@ -1463,7 +1463,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
   // TODO: can this be improved?
   public void testGhosts() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    IndexWriterConfig iwc = newIndexWriterConfig(null);
     iwc.setCodec(getCodec());
     iwc.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc);
@@ -1505,7 +1505,7 @@ public abstract class BasePostingsFormatTestCase extends BaseIndexFileFormatTest
     Directory dir = newDirectory();
     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    IndexWriterConfig iwc = newIndexWriterConfig(analyzer);
     // Must be concurrent because thread(s) can be merging
     // while up to one thread flushes, and each of those
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
index 01d12cd6877..8ea4b8a2b3e 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseStoredFieldsFormatTestCase.java
@@ -81,7 +81,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testRandomStoredFields() throws IOException {
     Directory dir = newDirectory();
     Random rand = random();
-    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMaxBufferedDocs(TestUtil.nextInt(rand, 5, 20)));
+    RandomIndexWriter w = new RandomIndexWriter(rand, dir, newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(TestUtil.nextInt(rand, 5, 20)));
     //w.w.setNoCFSRatio(0.0);
     final int docCount = atLeast(200);
     final int fieldCount = TestUtil.nextInt(rand, 1, 5);
@@ -177,7 +177,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   // LUCENE-1727: make sure doc fields are stored in order
   public void testStoredFieldsOrder() throws Throwable {
     Directory d = newDirectory();
-    IndexWriter w = new IndexWriter(d, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    IndexWriter w = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
     Document doc = new Document();
     FieldType customType = new FieldType();
@@ -212,7 +212,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   // LUCENE-1219
   public void testBinaryFieldOffsetLength() throws IOException {
     Directory dir = newDirectory();
-    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
     byte[] b = new byte[50];
     for(int i=0;i<50;i++)
       b[i] = (byte) (i+77);
@@ -330,7 +330,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testReadSkip() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
     iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
@@ -383,7 +383,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testEmptyDocs() throws IOException {
     Directory dir = newDirectory();
-    IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
     iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
@@ -408,7 +408,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testConcurrentReads() throws Exception {
     Directory dir = newDirectory();
-    IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
     iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
@@ -496,7 +496,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
       otherCodec = new SimpleTextCodec();
     }
     Directory dir = newDirectory();
-    IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
     iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
@@ -531,7 +531,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
       iw.w.addDocument(doc);
       if (random().nextBoolean() && (i % (data.length / 10) == 0)) {
        iw.w.shutdown();
-        IndexWriterConfig iwConfNew = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+        IndexWriterConfig iwConfNew = newIndexWriterConfig(new MockAnalyzer(random()));
        // test merging against a non-compressing codec
        if (iwConf.getCodec() == otherCodec) {
          iwConfNew.setCodec(Codec.getDefault());
@@ -589,7 +589,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
     // we can't just use newFSDirectory, because this test doesn't really index anything.
     // so if we get NRTCachingDir+SimpleText, we make massive stored fields and OOM (LUCENE-4484)
     Directory dir = new MockDirectoryWrapper(random(), new MMapDirectory(createTempDir("testBigDocuments")));
-    IndexWriterConfig iwConf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwConf = newIndexWriterConfig(new MockAnalyzer(random()));
     iwConf.setMaxBufferedDocs(RandomInts.randomIntBetween(random(), 2, 30));
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConf);
@@ -654,7 +654,7 @@ public abstract class BaseStoredFieldsFormatTestCase extends BaseIndexFileFormat
   public void testBulkMergeWithDeletes() throws IOException {
     final int numDocs = atLeast(200);
     Directory dir = newDirectory();
-    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
+    RandomIndexWriter w = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(NoMergePolicy.INSTANCE));
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       doc.add(new StringField("id", Integer.toString(i), Store.YES));
diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
index 6c0c6063f97..d3a2ca5dd00 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java
@@ -442,7 +442,7 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas
     }
     MockAnalyzer analyzer = new MockAnalyzer(random());
     analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
-    final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
+    final IndexWriterConfig conf = newIndexWriterConfig(analyzer);
     conf.setInfoStream(new FailOnNonBulkMergesInfoStream());
     if (conf.getMergePolicy() instanceof MockRandomMergePolicy) {
       ((MockRandomMergePolicy)conf.getMergePolicy()).setDoNonBulkMerges(false);
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index 4aa68503a62..5418a9018ef 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -871,8 +871,8 @@ public abstract class LuceneTestCase extends Assert {
   }
   /** create a new index writer config with random defaults */
-  public static IndexWriterConfig newIndexWriterConfig(Version v, Analyzer a) {
-    return newIndexWriterConfig(random(), v, a);
+  public static IndexWriterConfig newIndexWriterConfig(Analyzer a) {
+    return newIndexWriterConfig(random(), TEST_VERSION_CURRENT, a);
   }
   /** create a new index writer config with random defaults using the specified random */
diff --git a/solr/core/src/test/org/apache/solr/search/TestStressLucene.java b/solr/core/src/test/org/apache/solr/search/TestStressLucene.java
index 66755aa6fc6..913db573db5 100644
--- a/solr/core/src/test/org/apache/solr/search/TestStressLucene.java
+++ b/solr/core/src/test/org/apache/solr/search/TestStressLucene.java
@@ -105,7 +105,7 @@ public class TestStressLucene extends TestRTGBase {
     Directory dir = newDirectory();
-    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
+    final RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig(new MockAnalyzer(random())));
     writer.setDoRandomForceMergeAssert(false);
     // writer.commit();
diff --git a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
index 6b780514652..175db62db9c 100644
--- a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
+++ b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
@@ -222,7 +222,7 @@ public class TestOrdValues extends LuceneTestCase {
     // prepare a small index with just a few documents.
     dir = newDirectory();
     anlzr = new MockAnalyzer(random());
-    IndexWriterConfig iwc = newIndexWriterConfig( TEST_VERSION_CURRENT, anlzr).setMergePolicy(newLogMergePolicy());
+    IndexWriterConfig iwc = newIndexWriterConfig(anlzr).setMergePolicy(newLogMergePolicy());
     if (doMultiSegment) {
       iwc.setMaxBufferedDocs(TestUtil.nextInt(random(), 2, 7));
     }
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 66fb541584e..5ff2812ce58 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -289,12 +289,12 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
   }
   /** sets system properties based on
-   * {@link #newIndexWriterConfig(org.apache.lucene.util.Version, org.apache.lucene.analysis.Analyzer)}
+   * {@link #newIndexWriterConfig(org.apache.lucene.analysis.Analyzer)}
   *
   * configs can use these system properties to vary the indexwriter settings
   */
  public static void newRandomConfig() {
-    IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
+    IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     System.setProperty("useCompoundFile", String.valueOf(iwc.getUseCompoundFile()));