LUCENE-5871: Remove Version from IndexWriterConfig

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1617004 13f79535-47bb-0310-9956-ffa450edef68
Ryan Ernst 2014-08-09 18:54:35 +00:00
parent 9938a39a87
commit 872bd9145c
451 changed files with 2023 additions and 2240 deletions

View File

@ -85,6 +85,11 @@ API Changes
release.
(Ryan Ernst, Robert Muir)
* LUCENE-5871: Remove Version from IndexWriterConfig. IndexWriter.close()
now commits pending changes by default, matching the 4.x behavior; use
IndexWriterConfig.setCommitOnClose(false) to disable this.
(Ryan Ernst, Mike McCandless)
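As a quick illustration of the migration this entry calls for, here is a minimal sketch (assumptions: the class name is invented, and a null analyzer stands in for a real one, as several tests in this commit do):

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class MigrationSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    // Before: new IndexWriterConfig(matchVersion, analyzer), closed with writer.shutdown()
    // After: no Version argument; close() commits by default
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(null));
    writer.addDocument(new Document());
    writer.close(); // flushes, waits for running merges, commits, then closes
    dir.close();
  }
}
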
Documentation
* LUCENE-5392: Add/improve analysis package documentation to reflect

View File

@ -253,7 +253,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
*/
public void testWickedLongTerm() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new ClassicAnalyzer()));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new ClassicAnalyzer()));
char[] chars = new char[IndexWriter.MAX_TERM_LENGTH];
Arrays.fill(chars, 'x');
@ -269,7 +269,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
doc = new Document();
doc.add(new TextField("content", "abc bbb ccc", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
@ -301,9 +301,9 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
doc.add(new TextField("content", bigTerm, Field.Store.NO));
ClassicAnalyzer sa = new ClassicAnalyzer();
sa.setMaxTokenLength(100000);
writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, sa));
writer = new IndexWriter(dir, new IndexWriterConfig(sa));
writer.addDocument(doc);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
assertEquals(1, reader.docFreq(new Term("content", bigTerm)));
reader.close();

View File

@ -49,14 +49,14 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
public void setUp() throws Exception {
super.setUp();
directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer()));
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new SimpleAnalyzer()));
Document doc = new Document();
doc.add(new StringField("partnum", "Q36", Field.Store.YES));
doc.add(new TextField("description", "Illidium Space Modulator", Field.Store.YES));
writer.addDocument(doc);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(directory);
searcher = newSearcher(reader);
@ -86,14 +86,14 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
public void testMutipleDocument() throws Exception {
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new TextField("partnum", "Q36", Field.Store.YES));
writer.addDocument(doc);
doc = new Document();
doc.add(new TextField("partnum", "Q37", Field.Store.YES));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
DocsEnum td = TestUtil.docs(random(),

View File

@ -19,7 +19,6 @@ package org.apache.lucene.analysis.miscellaneous;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.TokenStream;
@ -28,7 +27,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
public class TestEmptyTokenStream extends BaseTokenStreamTestCase {
@ -66,7 +64,7 @@ public class TestEmptyTokenStream extends BaseTokenStreamTestCase {
assertEquals(1, writer.numDocs());
writer.shutdown();
writer.close();
directory.close();
}

View File

@ -67,8 +67,7 @@ public class TestLimitTokenCountAnalyzer extends BaseTokenStreamTestCase {
mock.setEnableChecks(consumeAll);
Analyzer a = new LimitTokenCountAnalyzer(mock, limit, consumeAll);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig
(TEST_VERSION_CURRENT, a));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(a));
Document doc = new Document();
StringBuilder b = new StringBuilder();
@ -78,7 +77,7 @@ public class TestLimitTokenCountAnalyzer extends BaseTokenStreamTestCase {
b.append(" z");
doc.add(newTextField("field", b.toString(), Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
Term t = new Term("field", "x");

View File

@ -42,7 +42,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
super.setUp();
dir = new RAMDirectory();
appAnalyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, appAnalyzer));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(appAnalyzer));
int numDocs = 200;
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
@ -52,7 +52,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
doc.add(new TextField("repetitiveField", repetitiveFieldValue, Field.Store.YES));
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
}

View File

@ -58,7 +58,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
super.setUp();
analyzer = new ShingleAnalyzerWrapper(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false), 2);
directory = newDirectory();
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(analyzer));
Document doc;
doc = new Document();
@ -73,7 +73,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
doc.add(new TextField("content", "a sentence which contains no test", Field.Store.YES));
writer.addDocument(doc);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(directory);
searcher = newSearcher(reader);

View File

@ -22,7 +22,6 @@ import java.util.Locale;
import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
@ -104,7 +103,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
doc.add(f1);
doc.add(f2);
w.addDocument(doc);
w.shutdown();
w.close();
IndexReader r = DirectoryReader.open(dir);
Terms vector = r.getTermVectors(0).terms("field");

View File

@ -67,7 +67,7 @@ public class TestCollationDocValuesField extends LuceneTestCase {
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
@ -102,7 +102,7 @@ public class TestCollationDocValuesField extends LuceneTestCase {
}
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
int numChecks = atLeast(100);

View File

@ -67,7 +67,7 @@ public class TestICUCollationDocValuesField extends LuceneTestCase {
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
@ -102,7 +102,7 @@ public class TestICUCollationDocValuesField extends LuceneTestCase {
}
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
int numChecks = atLeast(100);

View File

@ -31,7 +31,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@ -69,7 +68,7 @@ public class UIMABaseAnalyzerTest extends BaseTokenStreamTestCase {
@Test
public void baseUIMAAnalyzerIntegrationTest() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add the first doc
Document doc = new Document();
String dummyTitle = "this is a dummy title ";
@ -114,7 +113,7 @@ public class UIMABaseAnalyzerTest extends BaseTokenStreamTestCase {
// do a matchalldocs query to retrieve both docs
result = indexSearcher.search(new MatchAllDocsQuery(), 2);
assertEquals(2, result.totalHits);
writer.shutdown();
writer.close();
indexSearcher.getIndexReader().close();
dir.close();
}

View File

@ -143,7 +143,7 @@ public class PerfRunData implements Closeable {
@Override
public void close() throws IOException {
if (indexWriter != null) {
indexWriter.shutdown();
indexWriter.close();
}
IOUtils.close(indexReader, directory,
taxonomyWriter, taxonomyReader, taxonomyDir,
@ -164,7 +164,7 @@ public class PerfRunData implements Closeable {
// cleanup index
if (indexWriter != null) {
indexWriter.shutdown();
indexWriter.close();
}
IOUtils.close(indexReader, directory);
indexWriter = null;

View File

@ -48,7 +48,7 @@ public class CloseIndexTask extends PerfTask {
if (doWait == false) {
iw.abortMerges();
}
iw.shutdown();
iw.close();
getRunData().setIndexWriter(null);
}
return 1;

View File

@ -99,8 +99,7 @@ public class CreateIndexTask extends PerfTask {
public static IndexWriterConfig createWriterConfig(Config config, PerfRunData runData, OpenMode mode, IndexCommit commit) {
@SuppressWarnings("deprecation")
Version version = Version.parseLeniently(config.get("writer.version", Version.LUCENE_CURRENT.toString()));
IndexWriterConfig iwConf = new IndexWriterConfig(version, runData.getAnalyzer());
IndexWriterConfig iwConf = new IndexWriterConfig(runData.getAnalyzer());
iwConf.setOpenMode(mode);
IndexDeletionPolicy indexDeletionPolicy = getIndexDeletionPolicy(config);
iwConf.setIndexDeletionPolicy(indexDeletionPolicy);

View File

@ -100,9 +100,9 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertTrue("Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
iw.shutdown();
iw.close();
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
ir.close();
@ -192,8 +192,8 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertTrue("Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.shutdown();
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
ir.close();
@ -232,8 +232,8 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertTrue("Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.shutdown();
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
ir.close();
@ -305,8 +305,8 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertEquals("TestSearchTask was supposed to be called!",139,CountingSearchTestTask.numSearches);
assertTrue("Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.shutdown();
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
ir.close();
@ -436,9 +436,9 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
// now we should be able to open the index for write.
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
iw.shutdown();
iw.close();
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
assertEquals(numLines + " lines were created but " + ir.numDocs() + " docs are in the index", numLines, ir.numDocs());
@ -661,7 +661,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertTrue("did not use the specified MergeScheduler",
((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getConfig()
.getMergeScheduler()).called);
benchmark.getRunData().getIndexWriter().shutdown();
benchmark.getRunData().getIndexWriter().close();
// 3. test number of docs in the index
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
@ -707,7 +707,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
// 2. execute the algorithm (required in every "logic" test)
Benchmark benchmark = execBenchmark(algLines);
assertTrue("did not use the specified MergePolicy", ((MyMergePolicy) benchmark.getRunData().getIndexWriter().getConfig().getMergePolicy()).called);
benchmark.getRunData().getIndexWriter().shutdown();
benchmark.getRunData().getIndexWriter().close();
// 3. test number of docs in the index
IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory());
@ -752,7 +752,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB());
assertEquals(3, ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor());
assertEquals(0.0d, writer.getConfig().getMergePolicy().getNoCFSRatio(), 0.0);
writer.shutdown();
writer.close();
Directory dir = benchmark.getRunData().getDirectory();
IndexReader reader = DirectoryReader.open(dir);
Fields tfv = reader.getTermVectors(0);

View File

@ -30,8 +30,6 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
import org.junit.BeforeClass;
/** Tests the functionality of {@link AddIndexesTask}. */
@ -47,11 +45,11 @@ public class AddIndexesTaskTest extends BenchmarkTestCase {
inputDir = new File(testDir, "input");
Directory tmpDir = newFSDirectory(inputDir);
try {
IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
IndexWriter writer = new IndexWriter(tmpDir, new IndexWriterConfig(null));
for (int i = 0; i < 10; i++) {
writer.addDocument(new Document());
}
writer.shutdown();
writer.close();
} finally {
tmpDir.close();
}

View File

@ -31,7 +31,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Version;
import java.io.IOException;
@ -70,9 +69,9 @@ public class DatasetSplitter {
// create IWs for train / test / cv IDXs
// :Post-Release-Update-Version.LUCENE_XY:
IndexWriter testWriter = new IndexWriter(testIndex, new IndexWriterConfig(Version.LUCENE_5_0, analyzer));
IndexWriter cvWriter = new IndexWriter(crossValidationIndex, new IndexWriterConfig(Version.LUCENE_5_0, analyzer));
IndexWriter trainingWriter = new IndexWriter(trainingIndex, new IndexWriterConfig(Version.LUCENE_5_0, analyzer));
IndexWriter testWriter = new IndexWriter(testIndex, new IndexWriterConfig(analyzer));
IndexWriter cvWriter = new IndexWriter(crossValidationIndex, new IndexWriterConfig(analyzer));
IndexWriter trainingWriter = new IndexWriter(trainingIndex, new IndexWriterConfig(analyzer));
try {
int size = originalIndex.maxDoc();
@ -128,9 +127,9 @@ public class DatasetSplitter {
cvWriter.commit();
trainingWriter.commit();
// close IWs
testWriter.shutdown();
cvWriter.shutdown();
trainingWriter.shutdown();
testWriter.close();
cvWriter.close();
trainingWriter.close();
}
}

View File

@ -74,7 +74,7 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {
@After
public void tearDown() throws Exception {
super.tearDown();
indexWriter.shutdown();
indexWriter.close();
dir.close();
}

View File

@ -86,7 +86,7 @@ public class DataSplitterTest extends LuceneTestCase {
@After
public void tearDown() throws Exception {
originalIndex.close();
indexWriter.shutdown();
indexWriter.close();
dir.close();
super.tearDown();
}

View File

@ -237,7 +237,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
public void testFloorBlocks() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
for(int i=0;i<128;i++) {
Document doc = new Document();
@ -275,7 +275,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
public void testNonRootFloorBlocks() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
List<String> terms = new ArrayList<>();
for(int i=0;i<36;i++) {
@ -321,7 +321,7 @@ public class TestOrdsBlockTree extends BasePostingsFormatTestCase {
public void testSeveralNonRootBlocks() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
List<String> terms = new ArrayList<>();
for(int i=0;i<30;i++) {

View File

@ -78,7 +78,7 @@ public class Test10KPulsings extends LuceneTestCase {
}
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
TermsEnum te = MultiFields.getTerms(ir, "field").iterator(null);
DocsEnum de = null;
@ -136,7 +136,7 @@ public class Test10KPulsings extends LuceneTestCase {
}
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
TermsEnum te = MultiFields.getTerms(ir, "field").iterator(null);
DocsEnum de = null;

View File

@ -52,7 +52,7 @@ public class TestPulsingReuse extends LuceneTestCase {
doc.add(new TextField("foo", "a b b c c c d e f g g h i i j j k", Field.Store.NO));
iw.addDocument(doc);
DirectoryReader ir = iw.getReader();
iw.shutdown();
iw.close();
AtomicReader segment = getOnlySegmentReader(ir);
DocsEnum reuse = null;
@ -93,7 +93,7 @@ public class TestPulsingReuse extends LuceneTestCase {
// but this seems 'good enough' for now.
iw.addDocument(doc);
DirectoryReader ir = iw.getReader();
iw.shutdown();
iw.close();
AtomicReader segment = getOnlySegmentReader(ir);
DocsEnum reuse = null;

View File

@ -23,7 +23,6 @@ import org.apache.lucene.util.CommandLineUtil;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.lucene.util.Version;
import java.io.File;
import java.io.IOException;
@ -42,7 +41,7 @@ import java.util.Collection;
* refuses to run by default. Specify {@code -delete-prior-commits}
* to override this, allowing the tool to delete all but the last commit.
* From Java code this can be enabled by passing {@code true} to
* {@link #IndexUpgrader(Directory,Version,InfoStream,boolean)}.
* {@link #IndexUpgrader(Directory,InfoStream,boolean)}.
* <p><b>Warning:</b> This tool may reorder documents if the index was partially
* upgraded before execution (e.g., documents were added). If your application relies
* on &quot;monotonicity&quot; of doc IDs (which means that the order in which the documents
@ -109,7 +108,7 @@ public final class IndexUpgrader {
} else {
dir = CommandLineUtil.newFSDirectory(dirImpl, new File(path));
}
return new IndexUpgrader(dir, Version.LUCENE_CURRENT, out, deletePriorCommits);
return new IndexUpgrader(dir, out, deletePriorCommits);
}
private final Directory dir;
@ -118,15 +117,15 @@ public final class IndexUpgrader {
/** Creates index upgrader on the given directory, using an {@link IndexWriter} with the
* default config. The tool refuses to upgrade indexes with multiple commit points. */
public IndexUpgrader(Directory dir, Version matchVersion) {
this(dir, new IndexWriterConfig(matchVersion, null), false);
public IndexUpgrader(Directory dir) {
this(dir, new IndexWriterConfig(null), false);
}
/** Creates index upgrader on the given directory, using an {@link IndexWriter} with the
* default config. This constructor can upgrade indexes with multiple commit points by removing
* all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
public IndexUpgrader(Directory dir, Version matchVersion, InfoStream infoStream, boolean deletePriorCommits) {
this(dir, new IndexWriterConfig(matchVersion, null), deletePriorCommits);
public IndexUpgrader(Directory dir, InfoStream infoStream, boolean deletePriorCommits) {
this(dir, new IndexWriterConfig(null), deletePriorCommits);
if (null != infoStream) {
this.iwc.setInfoStream(infoStream);
}
@ -168,7 +167,7 @@ public final class IndexUpgrader {
infoStream.message("IndexUpgrader", "All segments upgraded to version " + Constants.LUCENE_MAIN_VERSION);
}
} finally {
w.shutdown();
w.close();
}
}
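For reference, a minimal usage sketch of the slimmed-down constructors (assumptions: "index" is a placeholder path to an existing older-format index, and the class name is invented):

import java.io.File;
import org.apache.lucene.index.IndexUpgrader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.InfoStream;

public class UpgradeSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = FSDirectory.open(new File("index")); // placeholder path
    try {
      // Default form: refuses to touch indexes with multiple commit points.
      new IndexUpgrader(dir).upgrade();
      // Or: log progress and allow deleting all but the last commit.
      new IndexUpgrader(dir, InfoStream.getDefault(), true).upgrade();
    } finally {
      dir.close();
    }
  }
}
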

View File

@ -64,7 +64,6 @@ import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.Version;
/**
An <code>IndexWriter</code> creates and maintains an index.
@ -891,48 +890,22 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
}
}
/** Gracefully shuts down this {@code IndexWriter} instance,
* by writing any changes, waiting for any running
* merges, committing, and closing. If you don't want to
* wait for merges, use {@link #shutdown(boolean)} instead.
*
* <p>If you called prepareCommit but failed to call
* commit, this method will throw {@code
* IllegalStateException} and the {@code IndexWriter}
* will not be closed.
*
* <p>If this method throws any other
* exception, the {@code IndexWriter} will be closed, but
* changes may have been lost.
*
* <p><b>NOTE</b>: You must ensure no
* other threads are still making changes at the same
* time that this method is invoked. */
public void shutdown() throws IOException {
shutdown(true);
}
/** Gracefully shut down this {@code IndexWriter}
* instance, with control over whether to wait for
* merges. See {@link #shutdown()}. */
public void shutdown(boolean waitForMerges) throws IOException {
/**
* Implementation for {@link #close()} when {@link IndexWriterConfig#commitOnClose} is true.
*/
private void shutdown() throws IOException {
if (pendingCommit != null) {
throw new IllegalStateException("cannot shutdown: prepareCommit was already called with no corresponding call to commit");
throw new IllegalStateException("cannot close: prepareCommit was already called with no corresponding call to commit");
}
if (infoStream.isEnabled("IW")) {
infoStream.message("IW", "now flush at shutdown");
infoStream.message("IW", "now flush at close");
}
boolean success = false;
try {
flush(waitForMerges, true);
finishMerges(waitForMerges);
flush(true, true);
finishMerges(true);
commit();
// TODO: we could just call rollback, but ... it's nice
// to catch IW bugs where after waitForMerges/commit we
// still have running merges / uncommitted changes, or
// tests that illegally leave threads indexing and then
// try to use shutdown:
close();
rollback(); // ie close, since we just committed
success = true;
} finally {
if (success == false) {
@ -945,58 +918,33 @@ public class IndexWriter implements Closeable, TwoPhaseCommit, Accountable {
}
}
}
/**
* Closes all open resources and releases the write lock.
* If there are running merges or uncommitted
* changes:
*
* If {@link IndexWriterConfig#commitOnClose} is <code>true</code>,
* this will attempt to gracefully shut down by writing any
* changes, waiting for any running merges, committing, and closing.
* In this case, note that:
* <ul>
* <li> If config.matchVersion >= LUCENE_5_0 then the
* changes are silently discarded.
* <li> Otherwise, a RuntimeException is thrown to
* indicate what was lost, but the IndexWriter is
* still closed.
* <li>If you called prepareCommit but failed to call commit, this
* method will throw {@code IllegalStateException} and the {@code IndexWriter}
* will not be closed.</li>
* <li>If this method throws any other exception, the {@code IndexWriter}
* will be closed, but changes may have been lost.</li>
* </ul>
*
* Use {@link #shutdown} if you want to flush, commit, and
* wait for merges, before closing.
*
* @throws IOException if there is a low-level IO error
* (the IndexWriter will still be closed)
* @throws RuntimeException if config.matchVersion <
* LUCENE_5_0 and there were pending changes that were
* lost (the IndexWriter will still be closed)
* <p><b>NOTE</b>: You must ensure no other threads are still making
* changes at the same time that this method is invoked.</p>
*/
@Override
public void close() throws IOException {
// If there are uncommitted changes, or still running
// merges, we will in fact close, but we'll throw an
// exception notifying the caller that they lost
// changes, if IWC.matchVersion is < 5.0:
boolean lostChanges = false;
// Only check for lost changes if the version earlier than 5.0:
if (config.getMatchVersion().onOrAfter(Version.LUCENE_5_0) == false) {
lostChanges = hasUncommittedChanges();
if (lostChanges == false) {
synchronized(this) {
if (pendingMerges.isEmpty() == false) {
lostChanges = true;
}
if (runningMerges.isEmpty() == false) {
lostChanges = true;
}
}
if (config.getCommitOnClose()) {
if (closed == false) {
shutdown();
}
}
// As long as there are no pending changes and no
// running merges, we just rollback to close:
rollback();
if (lostChanges) {
throw new RuntimeException("this writer is closed, but some pending changes or running merges were discarded; use shutdown to save pending changes and finish merges before closing");
} else {
rollback();
}
}
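To make the new contract concrete, a minimal sketch of the prepareCommit guard described above (assumptions: the class name is invented; a null analyzer stands in for a real one, as in several tests in this commit):

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class PrepareCommitCloseSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    // commitOnClose defaults to true, so close() takes the shutdown path above.
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
    w.addDocument(new Document());
    w.prepareCommit();
    try {
      w.close(); // throws IllegalStateException: prepareCommit had no matching commit
    } catch (IllegalStateException expected) {
      w.commit(); // finish the two-phase commit...
      w.close();  // ...now close() succeeds
    }
    dir.close();
  }
}
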

View File

@ -28,7 +28,6 @@ import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;
import org.apache.lucene.util.SetOnce;
import org.apache.lucene.util.SetOnce.AlreadySetException;
import org.apache.lucene.util.Version;
/**
* Holds all the configuration that is used to create an {@link IndexWriter}.
@ -115,6 +114,9 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
* merging segments (set to <code>false</code>). You can set this
* to <code>true</code> for additional safety. */
public final static boolean DEFAULT_CHECK_INTEGRITY_AT_MERGE = false;
/** Default value for whether calls to {@link IndexWriter#close()} include a commit. */
public final static boolean DEFAULT_COMMIT_ON_CLOSE = true;
/**
* Sets the default (for any instance) maximum time to wait for a write lock
@ -150,8 +152,7 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
}
/**
* Creates a new config that with defaults that match the specified
* {@link Version} as well as the default {@link
Creates a new config with the default {@link
* Analyzer}. By default, {@link TieredMergePolicy} is used
for merging.
* Note that {@link TieredMergePolicy} is free to select
@ -160,8 +161,8 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
* should switch to {@link LogByteSizeMergePolicy} or
* {@link LogDocMergePolicy}.
*/
public IndexWriterConfig(Version matchVersion, Analyzer analyzer) {
super(analyzer, matchVersion);
public IndexWriterConfig(Analyzer analyzer) {
super(analyzer);
}
/** Specifies {@link OpenMode} of the index.
@ -520,6 +521,15 @@ public final class IndexWriterConfig extends LiveIndexWriterConfig {
return (IndexWriterConfig) super.setUseCompoundFile(useCompoundFile);
}
/**
* Sets whether calls to {@link IndexWriter#close()} should first commit
* before closing. Use <code>true</code> to match the behavior of Lucene 4.x.
*/
public IndexWriterConfig setCommitOnClose(boolean commitOnClose) {
this.commitOnClose = commitOnClose;
return this;
}
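A short sketch of opting out (assumptions: class name invented; null analyzer as elsewhere in this commit): with commitOnClose=false, close() behaves like rollback(), so durability requires an explicit commit().

import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CommitOnCloseSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriterConfig iwc = new IndexWriterConfig(null).setCommitOnClose(false);
    IndexWriter w = new IndexWriter(dir, iwc);
    w.addDocument(new Document());
    w.commit();                      // first doc is made durable explicitly
    w.addDocument(new Document());
    w.close();                       // like rollback(): the uncommitted doc is discarded
    IndexReader r = DirectoryReader.open(dir);
    System.out.println(r.numDocs()); // prints 1, not 2
    r.close();
    dir.close();
  }
}
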
@Override
public String toString() {
StringBuilder sb = new StringBuilder(super.toString());

View File

@ -25,7 +25,6 @@ import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.Version;
/**
* Holds all the configuration used by {@link IndexWriter} with few setters for
@ -92,19 +91,18 @@ public class LiveIndexWriterConfig {
* segment, after which the segment is forced to flush. */
protected volatile int perThreadHardLimitMB;
/** {@link Version} that {@link IndexWriter} should emulate. */
protected final Version matchVersion;
/** True if segment flushes should use compound file format */
protected volatile boolean useCompoundFile = IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM;
/** True if merging should check integrity of segments before merge */
protected volatile boolean checkIntegrityAtMerge = IndexWriterConfig.DEFAULT_CHECK_INTEGRITY_AT_MERGE;
/** True if calls to {@link IndexWriter#close()} should first do a commit. */
protected boolean commitOnClose = IndexWriterConfig.DEFAULT_COMMIT_ON_CLOSE;
// used by IndexWriterConfig
LiveIndexWriterConfig(Analyzer analyzer, Version matchVersion) {
LiveIndexWriterConfig(Analyzer analyzer) {
this.analyzer = analyzer;
this.matchVersion = matchVersion;
ramBufferSizeMB = IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
maxBufferedDocs = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
maxBufferedDeleteTerms = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
@ -465,11 +463,17 @@ public class LiveIndexWriterConfig {
public boolean getCheckIntegrityAtMerge() {
return checkIntegrityAtMerge;
}
/**
* Returns <code>true</code> if {@link IndexWriter#close()} should first commit before closing.
*/
public boolean getCommitOnClose() {
return commitOnClose;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("matchVersion=").append(matchVersion).append("\n");
sb.append("analyzer=").append(analyzer == null ? "null" : analyzer.getClass().getName()).append("\n");
sb.append("ramBufferSizeMB=").append(getRAMBufferSizeMB()).append("\n");
sb.append("maxBufferedDocs=").append(getMaxBufferedDocs()).append("\n");
@ -491,12 +495,7 @@ public class LiveIndexWriterConfig {
sb.append("perThreadHardLimitMB=").append(getRAMPerThreadHardLimitMB()).append("\n");
sb.append("useCompoundFile=").append(getUseCompoundFile()).append("\n");
sb.append("checkIntegrityAtMerge=").append(getCheckIntegrityAtMerge()).append("\n");
sb.append("commitOnClose=").append(getCommitOnClose()).append("\n");
return sb.toString();
}
/** Returns the {@code matchVersion} that was provided to
* the constructor. */
public Version getMatchVersion() {
return matchVersion;
}
}

View File

@ -112,7 +112,7 @@ public class IndexSearcher {
/** Runs searches for each segment separately, using the
* provided ExecutorService. IndexSearcher will not
* shutdown/awaitTermination this ExecutorService on
* close/awaitTermination this ExecutorService on
* close; you must do so, eventually, on your own. NOTE:
* if you are using {@link NIOFSDirectory}, do not use
* the shutdownNow method of ExecutorService as this uses
@ -130,7 +130,7 @@ public class IndexSearcher {
* <p>
* Given a non-<code>null</code> {@link ExecutorService} this method runs
* searches for each segment separately, using the provided ExecutorService.
* IndexSearcher will not shutdown/awaitTermination this ExecutorService on
* IndexSearcher will not close/awaitTermination this ExecutorService on
* close; you must do so, eventually, on your own. NOTE: if you are using
* {@link NIOFSDirectory}, do not use the shutdownNow method of
* ExecutorService as this uses Thread.interrupt under-the-hood which can

View File

@ -53,7 +53,7 @@ public class TestDemo extends LuceneTestCase {
String text = "This is the text to be indexed. " + longTerm;
doc.add(newTextField("fieldname", text, Field.Store.YES));
iwriter.addDocument(doc);
iwriter.shutdown();
iwriter.close();
// Now search the index:
IndexReader ireader = DirectoryReader.open(directory); // read-only=true

View File

@ -130,7 +130,7 @@ public class TestExternalCodecs extends LuceneTestCase {
}
r.close();
w.shutdown();
w.close();
dir.close();
}

View File

@ -103,7 +103,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
writer.addDocument(doc);
((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.shutdown();
writer.close();
assertTrue(mergeThreadCreated);
assertTrue(mergeCalled);
@ -134,7 +134,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
// compiles. But ensure that it can be used as well, e.g., no other hidden
// dependencies or something. Therefore, don't use any random API !
Directory dir = new RAMDirectory();
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig conf = new IndexWriterConfig(null);
conf.setMergeScheduler(new ReportingMergeScheduler());
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(new Document());
@ -142,7 +142,7 @@ public class TestMergeSchedulerExternal extends LuceneTestCase {
writer.addDocument(new Document());
writer.commit(); // trigger flush
writer.forceMerge(1);
writer.shutdown();
writer.close();
dir.close();
}

View File

@ -50,7 +50,7 @@ public class TestSearch extends LuceneTestCase {
d.add(newTextField("foo", "bar", Field.Store.YES));
writer.addDocument(d);
} finally {
writer.shutdown();
writer.close();
}
IndexReader reader = DirectoryReader.open(directory);
@ -130,7 +130,7 @@ public class TestSearch extends LuceneTestCase {
d.add(new IntField("id", j, Field.Store.NO));
writer.addDocument(d);
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(directory);
IndexSearcher searcher = newSearcher(reader);

View File

@ -85,7 +85,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
d.add(new NumericDocValuesField(ID_FIELD, j));
writer.addDocument(d);
}
writer.shutdown();
writer.close();
// try a search without OR
IndexReader reader = DirectoryReader.open(directory);

View File

@ -97,7 +97,7 @@ public class TestCachingTokenFilter extends BaseTokenStreamTestCase {
assertEquals(1, termPositions.freq());
assertEquals(2, termPositions.nextPosition());
reader.close();
writer.shutdown();
writer.close();
// 3) reset stream and consume tokens again
stream.reset();
checkTokens(stream);

View File

@ -448,7 +448,7 @@ public class TestGraphTokenizers extends BaseTokenStreamTestCase {
final String s = a.toDot();
Writer w = new OutputStreamWriter(new FileOutputStream("/x/tmp/out.dot"));
w.write(s);
w.shutdown();
w.close();
System.out.println("TEST: saved to /x/tmp/out.dot");
}
*/

View File

@ -329,7 +329,7 @@ public class TestMockAnalyzer extends BaseTokenStreamTestCase {
assertEquals(1 + endOffset + offsetGap, dpe.endOffset());
assertEquals(null, te.next());
reader.close();
writer.shutdown();
writer.close();
writer.w.getDirectory().close();
}

View File

@ -89,7 +89,7 @@ public class TestCompressingStoredFieldsFormat extends BaseStoredFieldsFormatTes
}
// Only one .fdt and one .fdx files must have been found
assertEquals(2, counter);
iw.shutdown();
iw.close();
dir.close();
}
}

View File

@ -69,7 +69,7 @@ public class TestCompressingTermVectorsFormat extends BaseTermVectorsFormatTestC
// expected exception
}
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
}

View File

@ -98,7 +98,7 @@ public class TestLucene40PostingsReader extends LuceneTestCase {
// delete 1-100% of docs
iw.deleteDocuments(new Term("title", terms[random().nextInt(terms.length)]));
}
iw.shutdown();
iw.close();
dir.close(); // checkindex
}

View File

@ -116,7 +116,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
}
assertEquals(1, enums.size());
}
writer.shutdown();
writer.close();
IOUtils.close(open, dir);
}
@ -161,7 +161,7 @@ public class TestReuseDocsEnum extends LuceneTestCase {
}
assertEquals(terms.size(), enums.size());
}
writer.shutdown();
writer.close();
IOUtils.close(firstReader, secondReader, dir);
}

View File

@ -52,14 +52,14 @@ public class TestBlockPostingsFormat2 extends LuceneTestCase {
@Override
public void tearDown() throws Exception {
iw.shutdown();
iw.close();
TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(TestUtil.alwaysPostingsFormat(new Lucene41PostingsFormat()));
iwc.setOpenMode(OpenMode.APPEND);
IndexWriter iw = new IndexWriter(dir, iwc);
iw.forceMerge(1);
iw.shutdown();
iw.close();
dir.close(); // just force a checkindex for now
super.tearDown();
}

View File

@ -53,7 +53,6 @@ import org.apache.lucene.util.English;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.automaton.AutomatonTestUtil;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.automaton.RegExp;
@ -134,7 +133,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
field8.setStringValue(stringValue);
iw.addDocument(doc);
}
iw.shutdown();
iw.close();
verify(dir);
TestUtil.checkIndex(dir); // for some extra coverage, checkIndex before we forceMerge
iwc = newIndexWriterConfig(analyzer);
@ -142,7 +141,7 @@ public class TestBlockPostingsFormat3 extends LuceneTestCase {
iwc.setOpenMode(OpenMode.APPEND);
IndexWriter iw2 = new IndexWriter(dir, iwc);
iw2.forceMerge(1);
iw2.shutdown();
iw2.close();
verify(dir);
dir.close();
}

View File

@ -45,7 +45,7 @@ public class TestLucene41PostingsFormat extends BasePostingsFormatTestCase {
/** Make sure the final sub-block(s) are not skipped. */
public void testFinalBlock() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
for(int i=0;i<25;i++) {
Document doc = new Document();
doc.add(newStringField("field", Character.toString((char) (97+i)), Field.Store.NO));

View File

@ -47,7 +47,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
/**
* Basic tests of PerFieldDocValuesFormat
@ -100,7 +99,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
doc.add(new NumericDocValuesField("dv1", 5));
doc.add(new BinaryDocValuesField("dv2", new BytesRef("hello world")));
iwriter.addDocument(doc);
iwriter.shutdown();
iwriter.close();
// Now search the index:
IndexReader ireader = DirectoryReader.open(directory); // read-only=true

View File

@ -110,7 +110,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
TestUtil.checkIndex(dir);
writer.forceMerge(1);
assertEquals(30, writer.maxDoc());
writer.shutdown();
writer.close();
dir.close();
}
@ -138,7 +138,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
}
addDocs3(writer, 10);
writer.commit();
writer.shutdown();
writer.close();
assertQuery(new Term("content", "ccc"), dir, 10);
assertQuery(new Term("content", "aaa"), dir, 10);
@ -179,7 +179,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
}
writer.forceMerge(1);
assertEquals(40, writer.maxDoc());
writer.shutdown();
writer.close();
assertQuery(new Term("content", "ccc"), dir, 10);
assertQuery(new Term("content", "bbb"), dir, 20);
assertQuery(new Term("content", "aaa"), dir, 10);
@ -242,7 +242,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
for (int i = 0; i < numRounds; i++) {
int num = TestUtil.nextInt(random(), 30, 60);
IndexWriterConfig config = newIndexWriterConfig(random(),
TEST_VERSION_CURRENT, new MockAnalyzer(random()));
new MockAnalyzer(random()));
config.setOpenMode(OpenMode.CREATE_OR_APPEND);
IndexWriter writer = newWriter(dir, config);
for (int j = 0; j < docsPerRound; j++) {
@ -262,7 +262,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
}
writer.commit();
assertEquals((i + 1) * docsPerRound, writer.maxDoc());
writer.shutdown();
writer.close();
}
dir.close();
}
@ -319,7 +319,7 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase {
dateField.setStringValue(Integer.toString(random().nextInt(100)));
iw.addDocument(doc);
}
iw.shutdown();
iw.close();
dir.close(); // checkindex
}
}

View File

@ -3,7 +3,6 @@ package org.apache.lucene.document;
import java.nio.charset.StandardCharsets;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.store.Directory;
@ -72,7 +71,7 @@ public class TestBinaryDocument extends LuceneTestCase {
String stringFldStoredTest = docFromReader.get("stringStored");
assertTrue(stringFldStoredTest.equals(binaryValStored));
writer.shutdown();
writer.close();
reader.close();
dir.close();
}
@ -101,7 +100,7 @@ public class TestBinaryDocument extends LuceneTestCase {
assertTrue(binaryFldCompressedTest.equals(binaryValCompressed));
assertTrue(CompressionTools.decompressString(docFromReader.getBinaryValue("stringCompressed")).equals(binaryValCompressed));
writer.shutdown();
writer.close();
reader.close();
dir.close();
}

View File

@ -221,7 +221,7 @@ public class TestDocument extends LuceneTestCase {
assertEquals(1, hits.length);
doAssert(searcher.doc(hits[0].doc));
writer.shutdown();
writer.close();
reader.close();
dir.close();
}
@ -253,7 +253,7 @@ public class TestDocument extends LuceneTestCase {
assertEquals(1, hits.length);
doAssert(searcher.doc(hits[0].doc));
writer.shutdown();
writer.close();
reader.close();
dir.close();
}
@ -344,7 +344,7 @@ public class TestDocument extends LuceneTestCase {
else if (f.stringValue().equals("id3")) result |= 4;
else fail("unexpected id field");
}
writer.shutdown();
writer.close();
reader.close();
dir.close();
assertEquals("did not see all IDs", 7, result);
@ -381,7 +381,7 @@ public class TestDocument extends LuceneTestCase {
assertNull(sdoc.get("somethingElse"));
assertArrayEquals(new String[] { "5", "4" }, sdoc.getValues("int"));
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
}

View File

@ -45,7 +45,7 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -71,7 +71,7 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
}
w.forceMerge(1);
w.shutdown();
w.close();
System.out.println("verifying...");
System.out.flush();
@ -104,7 +104,7 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -130,7 +130,7 @@ public class Test2BBinaryDocValues extends LuceneTestCase {
}
w.forceMerge(1);
w.shutdown();
w.close();
System.out.println("verifying...");
System.out.flush();

View File

@ -42,7 +42,7 @@ public class Test2BNumericDocValues extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -63,7 +63,7 @@ public class Test2BNumericDocValues extends LuceneTestCase {
}
w.forceMerge(1);
w.shutdown();
w.close();
System.out.println("verifying...");
System.out.flush();

View File

@ -28,12 +28,9 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.Monster;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.junit.Ignore;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
@ -53,7 +50,7 @@ public class Test2BPositions extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -80,7 +77,7 @@ public class Test2BPositions extends LuceneTestCase {
}
}
w.forceMerge(1);
w.shutdown();
w.close();
dir.close();
}

View File

@ -29,9 +29,7 @@ import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.TestUtil;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
@ -50,7 +48,7 @@ public class Test2BPostings extends LuceneTestCase {
((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -80,7 +78,7 @@ public class Test2BPostings extends LuceneTestCase {
}
}
w.forceMerge(1);
w.shutdown();
w.close();
dir.close();
}

View File

@ -30,7 +30,6 @@ import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.store.BaseDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.LuceneTestCase.Monster;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
@ -54,7 +53,7 @@ public class Test2BPostingsBytes extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -85,7 +84,7 @@ public class Test2BPostingsBytes extends LuceneTestCase {
w.addDocument(doc);
}
w.forceMerge(1);
w.shutdown();
w.close();
DirectoryReader oneThousand = DirectoryReader.open(dir);
IndexReader subReaders[] = new IndexReader[1000];
@ -96,10 +95,10 @@ public class Test2BPostingsBytes extends LuceneTestCase {
((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w2 = new IndexWriter(dir2,
new IndexWriterConfig(TEST_VERSION_CURRENT, null));
new IndexWriterConfig(null));
w2.addIndexes(mr);
w2.forceMerge(1);
w2.shutdown();
w2.close();
oneThousand.close();
DirectoryReader oneMillion = DirectoryReader.open(dir2);
@ -111,10 +110,10 @@ public class Test2BPostingsBytes extends LuceneTestCase {
((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
}
IndexWriter w3 = new IndexWriter(dir3,
new IndexWriterConfig(TEST_VERSION_CURRENT, null));
new IndexWriterConfig(null));
w3.addIndexes(mr);
w3.forceMerge(1);
w3.shutdown();
w3.close();
oneMillion.close();
dir.close();

View File

@ -17,8 +17,6 @@ package org.apache.lucene.index;
* limitations under the License.
*/
import java.util.Random;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.SortedDocValuesField;
@ -45,7 +43,7 @@ public class Test2BSortedDocValues extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -69,7 +67,7 @@ public class Test2BSortedDocValues extends LuceneTestCase {
}
w.forceMerge(1);
w.shutdown();
w.close();
System.out.println("verifying...");
System.out.flush();
@ -100,7 +98,7 @@ public class Test2BSortedDocValues extends LuceneTestCase {
}
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -126,7 +124,7 @@ public class Test2BSortedDocValues extends LuceneTestCase {
}
w.forceMerge(1);
w.shutdown();
w.close();
System.out.println("verifying...");
System.out.flush();

View File

@ -181,7 +181,7 @@ public class Test2BTerms extends LuceneTestCase {
if (true) {
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -218,7 +218,7 @@ public class Test2BTerms extends LuceneTestCase {
System.out.println("TEST: full merge");
w.forceMerge(1);
System.out.println("TEST: close writer");
w.shutdown();
w.close();
}
System.out.println("TEST: open reader");

View File

@ -25,9 +25,7 @@ import org.apache.lucene.store.MMapDirectory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
@ -46,7 +44,7 @@ public class Test4GBStoredFields extends LuceneTestCase {
dir.setThrottling(MockDirectoryWrapper.Throttling.NEVER);
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
.setRAMBufferSizeMB(256.0)
.setMergeScheduler(new ConcurrentMergeScheduler())
@ -81,7 +79,7 @@ public class Test4GBStoredFields extends LuceneTestCase {
}
}
w.forceMerge(1);
w.shutdown();
w.close();
if (VERBOSE) {
boolean found = false;
for (String file : dir.listAll()) {

View File

@ -64,7 +64,7 @@ public class TestAddIndexes extends LuceneTestCase {
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.shutdown();
writer.close();
TestUtil.checkIndex(dir);
writer = newWriter(
@ -76,20 +76,20 @@ public class TestAddIndexes extends LuceneTestCase {
// add 40 documents in separate files
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.shutdown();
writer.close();
writer = newWriter(aux2, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
// add 50 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.shutdown();
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexes(aux, aux2);
assertEquals(190, writer.maxDoc());
writer.shutdown();
writer.close();
TestUtil.checkIndex(dir);
// make sure the old index is correct
@ -104,14 +104,14 @@ public class TestAddIndexes extends LuceneTestCase {
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.shutdown();
writer.close();
// test doc count before segments are merged
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexes(aux3);
assertEquals(230, writer.maxDoc());
writer.shutdown();
writer.close();
// make sure the new index is correct
verifyNumDocs(dir, 230);
@ -123,7 +123,7 @@ public class TestAddIndexes extends LuceneTestCase {
// now fully merge it.
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
writer.close();
// make sure the new index is correct
verifyNumDocs(dir, 230);
@ -136,13 +136,13 @@ public class TestAddIndexes extends LuceneTestCase {
Directory aux4 = newDirectory();
writer = newWriter(aux4, newIndexWriterConfig(new MockAnalyzer(random())));
addDocs2(writer, 1);
writer.shutdown();
writer.close();
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexes(aux4);
assertEquals(231, writer.maxDoc());
writer.shutdown();
writer.close();
verifyNumDocs(dir, 231);
@ -185,7 +185,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "aaa"), 1030);
verifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.shutdown();
writer.close();
dir.close();
aux.close();
}
@ -223,7 +223,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "aaa"), 1030);
verifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.shutdown();
writer.close();
dir.close();
aux.close();
}
@ -261,7 +261,7 @@ public class TestAddIndexes extends LuceneTestCase {
verifyTermDocs(dir, new Term("content", "aaa"), 1030);
verifyTermDocs(dir, new Term("content", "bbb"), 9);
writer.shutdown();
writer.close();
dir.close();
aux.close();
}
@ -279,7 +279,7 @@ public class TestAddIndexes extends LuceneTestCase {
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.shutdown();
writer.close();
writer = newWriter(
aux,
@ -290,7 +290,7 @@ public class TestAddIndexes extends LuceneTestCase {
);
// add 140 documents in separate files
addDocs(writer, 40);
writer.shutdown();
writer.close();
writer = newWriter(
aux,
newIndexWriterConfig(new MockAnalyzer(random())).
@ -299,7 +299,7 @@ public class TestAddIndexes extends LuceneTestCase {
setMergePolicy(newLogMergePolicy(false))
);
addDocs(writer, 100);
writer.shutdown();
writer.close();
writer = newWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
try {
@ -310,7 +310,7 @@ public class TestAddIndexes extends LuceneTestCase {
catch (IllegalArgumentException e) {
assertEquals(100, writer.maxDoc());
}
writer.shutdown();
writer.close();
// make sure the index is correct
verifyNumDocs(dir, 100);
@ -341,7 +341,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux);
assertEquals(1040, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.shutdown();
writer.close();
// make sure the index is correct
verifyNumDocs(dir, 1040);
@ -370,7 +370,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux);
assertEquals(1032, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.shutdown();
writer.close();
// make sure the index is correct
verifyNumDocs(dir, 1032);
@ -398,7 +398,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux, new MockDirectoryWrapper(random(), new RAMDirectory(aux, newIOContext(random()))));
assertEquals(1060, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.shutdown();
writer.close();
// make sure the index is correct
verifyNumDocs(dir, 1060);
@ -415,13 +415,13 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux, true);
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 20; i++) {
writer.deleteDocuments(new Term("id", "" + i));
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(aux);
assertEquals(10, reader.numDocs());
reader.close();
@ -440,7 +440,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux, new MockDirectoryWrapper(random(), new RAMDirectory(aux, newIOContext(random()))));
assertEquals(1020, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.shutdown();
writer.close();
dir.close();
aux.close();
}
@ -465,26 +465,26 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux);
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
writer.shutdown();
writer.close();
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig dontMergeConfig = new IndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE);
writer = new IndexWriter(aux, dontMergeConfig);
for (int i = 0; i < 27; i++) {
writer.deleteDocuments(new Term("id", "" + i));
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(aux);
assertEquals(3, reader.numDocs());
reader.close();
dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
dontMergeConfig = new IndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE);
writer = new IndexWriter(aux2, dontMergeConfig);
for (int i = 0; i < 8; i++) {
writer.deleteDocuments(new Term("id", "" + i));
}
writer.shutdown();
writer.close();
reader = DirectoryReader.open(aux2);
assertEquals(22, reader.numDocs());
reader.close();
@ -500,7 +500,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer.addIndexes(aux, aux2);
assertEquals(1040, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.shutdown();
writer.close();
dir.close();
aux.close();
aux2.close();
@ -563,7 +563,7 @@ public class TestAddIndexes extends LuceneTestCase {
}
assertEquals(1000, writer.maxDoc());
assertEquals(1, writer.getSegmentCount());
writer.shutdown();
writer.close();
writer = newWriter(
aux,
@ -579,7 +579,7 @@ public class TestAddIndexes extends LuceneTestCase {
} else {
addDocs(writer, 10);
}
writer.shutdown();
writer.close();
writer = newWriter(
aux,
newIndexWriterConfig(new MockAnalyzer(random())).
@ -590,7 +590,7 @@ public class TestAddIndexes extends LuceneTestCase {
}
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
writer.shutdown();
writer.close();
}
// LUCENE-1270
@ -622,7 +622,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc2.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", customType2));
for(int i=0;i<10;i++)
writer.addDocument(doc2);
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
lmp = new LogByteSizeMergePolicy();
@ -632,7 +632,7 @@ public class TestAddIndexes extends LuceneTestCase {
writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(lmp));
writer.addIndexes(dir);
writer.shutdown();
writer.close();
dir.close();
dir2.close();
}
@ -661,15 +661,14 @@ public class TestAddIndexes extends LuceneTestCase {
public RunAddIndexesThreads(int numCopy) throws Throwable {
NUM_COPY = numCopy;
dir = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++)
addDoc(writer);
writer.shutdown();
writer.close();
dir2 = newDirectory();
writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
writer2 = new IndexWriter(dir2, new IndexWriterConfig(new MockAnalyzer(random())));
writer2.commit();
@ -719,7 +718,7 @@ public class TestAddIndexes extends LuceneTestCase {
if (doWait == false) {
writer2.abortMerges();
}
//writer2.shutdown();
//writer2.close();
writer2.rollback();
}
@ -976,10 +975,10 @@ public class TestAddIndexes extends LuceneTestCase {
Document doc = new Document();
doc.add(new StringField("id", "myid", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
}
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dirs[0], conf);
// Now delete the document
@ -992,7 +991,7 @@ public class TestAddIndexes extends LuceneTestCase {
}
writer.commit();
assertEquals("Documents from the incoming index should not have been deleted", 1, writer.numDocs());
writer.shutdown();
writer.close();
for (Directory dir : dirs) {
dir.close();
@ -1025,7 +1024,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDocsWithID(writer, 100, 0);
assertEquals(100, writer.maxDoc());
writer.commit();
writer.shutdown();
writer.close();
TestUtil.checkIndex(dir);
writer = newWriter(
@ -1040,7 +1039,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.commit();
writer.shutdown();
writer.close();
writer = newWriter(
aux2,
@ -1052,7 +1051,7 @@ public class TestAddIndexes extends LuceneTestCase {
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.commit();
writer.shutdown();
writer.close();
// test doc count before segments are merged
writer = newWriter(
@ -1064,7 +1063,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(100, writer.maxDoc());
writer.addIndexes(aux, aux2);
assertEquals(190, writer.maxDoc());
writer.shutdown();
writer.close();
dir.close();
aux.close();
@ -1094,26 +1093,26 @@ public class TestAddIndexes extends LuceneTestCase {
Directory[] dirs = new Directory[2];
for (int i = 0; i < dirs.length; i++) {
dirs[i] = new RAMDirectory();
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(new MockAnalyzer(random())));
Document d = new Document();
FieldType customType = new FieldType(TextField.TYPE_STORED);
customType.setStoreTermVectors(true);
d.add(new Field("c", "v", customType));
w.addDocument(d);
w.shutdown();
w.close();
}
IndexReader[] readers = new IndexReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
Directory dir = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
MergePolicy lmp = conf.getMergePolicy();
// Force creation of CFS:
lmp.setNoCFSRatio(1.0);
lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
IndexWriter w3 = new IndexWriter(dir, conf);
w3.addIndexes(readers);
w3.shutdown();
w3.close();
// we should now see segments_X,
// segments.gen,_Y.cfs,_Y.cfe, _Z.si
assertEquals("Only one compound segment should exist, but got: " + Arrays.toString(dir.listAll()), 5, dir.listAll().length);
@ -1143,7 +1142,7 @@ public class TestAddIndexes extends LuceneTestCase {
customType.setIndexed(true);
doc.add(newField("foo", "bar", customType));
w.addDocument(doc);
w.shutdown();
w.close();
}
{
@ -1157,7 +1156,7 @@ public class TestAddIndexes extends LuceneTestCase {
} catch (IllegalArgumentException ex) {
// expected
}
w.shutdown();
w.close();
IndexReader open = DirectoryReader.open(dir);
assertEquals(0, open.numDocs());
open.close();
@ -1182,7 +1181,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("id", "1", Field.Store.YES));
w.addDocument(doc);
IndexReader r1 = w.getReader();
w.shutdown();
w.close();
Directory d2 = newDirectory();
w = new RandomIndexWriter(random(), d2);
@ -1191,7 +1190,7 @@ public class TestAddIndexes extends LuceneTestCase {
doc.add(newStringField("id", "2", Field.Store.YES));
w.addDocument(doc);
IndexReader r2 = w.getReader();
w.shutdown();
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random(), d3);
@ -1202,7 +1201,7 @@ public class TestAddIndexes extends LuceneTestCase {
d2.close();
IndexReader r3 = w.getReader();
w.shutdown();
w.close();
assertEquals(2, r3.numDocs());
for(int docID=0;docID<2;docID++) {
StoredDocument d = r3.document(docID);
@ -1221,7 +1220,7 @@ public class TestAddIndexes extends LuceneTestCase {
RandomIndexWriter w = new RandomIndexWriter(random(), d1);
MultiReader empty = new MultiReader();
w.addIndexes(empty);
w.shutdown();
w.close();
DirectoryReader dr = DirectoryReader.open(d1);
for (AtomicReaderContext ctx : dr.leaves()) {
assertTrue("empty segments should be dropped by addIndexes", ctx.reader().maxDoc() > 0);
@ -1239,11 +1238,11 @@ public class TestAddIndexes extends LuceneTestCase {
RandomIndexWriter w = new RandomIndexWriter(random(), src);
w.addDocument(new Document());
IndexReader allDeletedReader = new AllDeletedFilterReader(w.getReader().leaves().get(0).reader());
w.shutdown();
w.close();
w = new RandomIndexWriter(random(), dest);
w.addIndexes(allDeletedReader);
w.shutdown();
w.close();
DirectoryReader dr = DirectoryReader.open(src);
for (AtomicReaderContext ctx : dr.leaves()) {
assertTrue("empty segments should be dropped by addIndexes", ctx.reader().maxDoc() > 0);
@ -1275,8 +1274,8 @@ public class TestAddIndexes extends LuceneTestCase {
// expected
}
w1.shutdown();
w2.shutdown();
w1.close();
w2.close();
IOUtils.close(src, dest);
}
}
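For reference, the close-semantics change driving most of the edits above can be sketched as a standalone snippet. This is a minimal illustration, assuming the post-LUCENE-5871 trunk API (the class name and RAMDirectory setup are illustrative, not from the patch): IndexWriter.close() now commits pending changes by default, and the former shutdown(false) behavior is spelled out as setCommitOnClose(false) plus an explicit commit() before close().

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CommitOnCloseSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();

    // Default after this change: close() commits, matching the old shutdown().
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.NO));
    writer.addDocument(doc);
    writer.close(); // publishes the pending document

    // Former shutdown(false) semantics: disable commit-on-close and commit
    // explicitly, so close() cannot silently publish half-built state.
    IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer())
        .setCommitOnClose(false);
    writer = new IndexWriter(dir, conf);
    writer.addDocument(doc);
    try {
      writer.commit();
    } finally {
      writer.close();
    }
    dir.close();
  }
}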


@ -60,7 +60,7 @@ public class TestAllFilesHaveChecksumFooter extends LuceneTestCase {
riw.deleteDocuments(new Term("id", Integer.toString(i)));
}
}
riw.shutdown();
riw.close();
checkHeaders(dir);
dir.close();
}


@ -61,7 +61,7 @@ public class TestAllFilesHaveCodecHeader extends LuceneTestCase {
// riw.deleteDocuments(new Term("id", Integer.toString(i)));
// }
}
riw.shutdown();
riw.close();
checkHeaders(dir);
dir.close();
}


@ -108,8 +108,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
TimedThread[] threads = new TimedThread[4];
IndexWriterConfig conf = new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(7);
((TieredMergePolicy) conf.getMergePolicy()).setMaxMergeAtOnce(3);
IndexWriter writer = RandomIndexWriter.mockIndexWriter(directory, conf, random());
@ -151,7 +150,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
searcherThread1.join();
searcherThread2.join();
writer.shutdown();
writer.close();
assertTrue("hit unexpected exception in indexer", !indexerThread.failed);
assertTrue("hit unexpected exception in indexer2", !indexerThread2.failed);


@ -146,7 +146,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));
// TODO: remove randomness
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
IndexWriterConfig conf = new IndexWriterConfig(analyzer)
.setMergePolicy(mp);
conf.setCodec(Codec.forName("Lucene40"));
IndexWriter writer = new IndexWriter(dir, conf);
@ -154,7 +154,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
for(int i=0;i<50;i++) {
writer.addDocument(docs.nextDoc());
}
writer.shutdown();
writer.close();
dir.close();
// Gives you time to copy the index out!: (there is also
@ -182,7 +182,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
TestUtil.rm(indexDir);
Directory dir = newFSDirectory(indexDir);
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setUseCompoundFile(false).setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter writer = new IndexWriter(dir, conf);
// create an index w/ few doc-values fields, some with updates and some without
@ -217,7 +217,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
updateNumeric(writer, "22", "ndv1", "ndv1_c", 200L); // update the field again
writer.commit();
writer.shutdown();
writer.close();
dir.close();
}*/
@ -273,9 +273,8 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
final boolean streamType = random().nextBoolean();
final int choice = TestUtil.nextInt(random(), 0, 2);
switch (choice) {
case 0: return new IndexUpgrader(dir, TEST_VERSION_CURRENT);
case 1: return new IndexUpgrader(dir, TEST_VERSION_CURRENT,
streamType ? null : InfoStream.NO_OUTPUT, false);
case 0: return new IndexUpgrader(dir);
case 1: return new IndexUpgrader(dir, streamType ? null : InfoStream.NO_OUTPUT, false);
case 2: return new IndexUpgrader(dir, newIndexWriterConfig(null), false);
default: fail("case statement didn't get updated when random bounds changed");
}
@ -330,7 +329,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
try {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())).setCommitOnClose(false));
fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
} catch (IndexFormatTooOldException e) {
// pass
@ -347,7 +346,11 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// IndexFormatTooOldException, and we don't want to mask the fail()
// above, so close without waiting for merges.
if (writer != null) {
writer.shutdown(false);
try {
writer.commit();
} finally {
writer.close();
}
}
writer = null;
}
@ -370,10 +373,9 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
System.out.println("\nTEST: index=" + name);
}
Directory dir = newDirectory(oldIndexDirs.get(name));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
w.forceMerge(1);
w.shutdown();
w.close();
dir.close();
}
@ -390,7 +392,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
if (VERBOSE) {
System.out.println("\nTEST: done adding indices; now close");
}
w.shutdown();
w.close();
targetDir.close();
}
@ -403,7 +405,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
Directory targetDir = newDirectory();
IndexWriter w = new IndexWriter(targetDir, newIndexWriterConfig(new MockAnalyzer(random())));
w.addIndexes(reader);
w.shutdown();
w.close();
reader.close();
targetDir.close();
@ -617,7 +619,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
expected = 45;
}
assertEquals("wrong doc count", expected, writer.numDocs());
writer.shutdown();
writer.close();
// make sure searching sees right # hits
IndexReader reader = DirectoryReader.open(dir);
@ -633,7 +635,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
@ -659,7 +661,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
@ -678,7 +680,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
mp.setNoCFSRatio(doCFS ? 1.0 : 0.0);
mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
// TODO: remove randomness
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp);
IndexWriter writer = new IndexWriter(dir, conf);
@ -689,23 +691,23 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
if (fullyMerged) {
writer.forceMerge(1);
}
writer.shutdown();
writer.close();
if (!fullyMerged) {
// open fresh writer so we get no prx file in the added segment
mp = new LogByteSizeMergePolicy();
mp.setNoCFSRatio(doCFS ? 1.0 : 0.0);
// TODO: remove randomness
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setMaxBufferedDocs(10).setMergePolicy(mp);
writer = new IndexWriter(dir, conf);
addNoProxDoc(writer);
writer.shutdown();
writer.close();
writer = new IndexWriter(dir, conf.setMergePolicy(NoMergePolicy.INSTANCE));
Term searchTerm = new Term("id", "7");
writer.deleteDocuments(searchTerm);
writer.shutdown();
writer.close();
}
dir.close();
@ -842,7 +844,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
Directory currentDir = newDirectory();
RandomIndexWriter riw = new RandomIndexWriter(random(), currentDir);
riw.addDocument(new Document());
riw.shutdown();
riw.close();
DirectoryReader ir = DirectoryReader.open(currentDir);
SegmentReader air = (SegmentReader)ir.leaves().get(0).reader();
String currentVersion = air.getSegmentInfo().info.getVersion();
@ -1015,24 +1017,32 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
for (int i = 0; i < 3; i++) {
// only use Log- or TieredMergePolicy, to make document addition predictable and not suddenly merge:
MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
.setMergePolicy(mp);
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(mp).setCommitOnClose(false);
IndexWriter w = new IndexWriter(ramDir, iwc);
// add few more docs:
for(int j = 0; j < RANDOM_MULTIPLIER * random().nextInt(30); j++) {
addDoc(w, id++);
}
w.shutdown(false);
try {
w.commit();
} finally {
w.close();
}
}
// add dummy segments (which are all in current
// version) to single segment index
MergePolicy mp = random().nextBoolean() ? newLogMergePolicy() : newTieredMergePolicy();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null)
.setMergePolicy(mp);
IndexWriterConfig iwc = new IndexWriterConfig(null)
.setMergePolicy(mp).setCommitOnClose(false);
IndexWriter w = new IndexWriter(dir, iwc);
w.addIndexes(ramDir);
w.shutdown(false);
try {
w.commit();
} finally {
w.close();
}
// determine count of segments in modified index
final int origSegCount = getNumberOfSegments(dir);
@ -1096,7 +1106,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
verifyDocValues(dir);
// update fields and verify index
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter writer = new IndexWriter(dir, conf);
updateNumeric(writer, "1", "ndv1", "ndv1_c", 300L);
updateNumeric(writer, "1", "ndv2", "ndv2_c", 300L);
@ -1110,7 +1120,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
writer.commit();
verifyDocValues(dir);
writer.shutdown();
writer.close();
dir.close();
}
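The IndexUpgrader constructors exercised in the switch earlier in this file lose their Version argument the same way. A hedged sketch of upgrading an existing index with the new signatures (the index path handling is illustrative, not from the patch):

import java.io.File;

import org.apache.lucene.index.IndexUpgrader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.InfoStream;

public class UpgradeSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = FSDirectory.open(new File(args[0]));
    // The three forms shown in the diff: Directory only; Directory plus
    // InfoStream and deletePriorCommits; Directory plus IndexWriterConfig
    // and deletePriorCommits.
    new IndexUpgrader(dir, InfoStream.NO_OUTPUT, false).upgrade();
    dir.close();
  }
}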


@ -48,7 +48,7 @@ public class TestBagOfPositions extends LuceneTestCase {
final int maxTermsPerDoc = TestUtil.nextInt(random(), 10, 20);
boolean isSimpleText = "SimpleText".equals(TestUtil.getPostingsFormat("field"));
IndexWriterConfig iwc = newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(random(), new MockAnalyzer(random()));
if ((isSimpleText || iwc.getMergePolicy() instanceof MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1)) {
// Otherwise test can take way too long (> 2 hours)
@ -148,7 +148,7 @@ public class TestBagOfPositions extends LuceneTestCase {
// from a docsAndPositionsEnum.
}
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
}


@ -33,7 +33,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
/**
* Simple test that adds numeric terms, where each term has the
@ -48,7 +47,7 @@ public class TestBagOfPostings extends LuceneTestCase {
boolean isSimpleText = "SimpleText".equals(TestUtil.getPostingsFormat("field"));
IndexWriterConfig iwc = newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(random(), new MockAnalyzer(random()));
if ((isSimpleText || iwc.getMergePolicy() instanceof MockRandomMergePolicy) && (TEST_NIGHTLY || RANDOM_MULTIPLIER > 1)) {
// Otherwise test can take way too long (> 2 hours)
@ -140,7 +139,7 @@ public class TestBagOfPostings extends LuceneTestCase {
// from a docsAndPositionsEnum.
}
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
}


@ -15,7 +15,6 @@ import org.apache.lucene.codecs.asserting.AssertingDocValuesFormat;
import org.apache.lucene.codecs.lucene40.Lucene40RWCodec;
import org.apache.lucene.codecs.lucene41.Lucene41RWCodec;
import org.apache.lucene.codecs.lucene42.Lucene42RWCodec;
import org.apache.lucene.codecs.lucene45.Lucene45DocValuesFormat;
import org.apache.lucene.codecs.lucene45.Lucene45RWCodec;
import org.apache.lucene.codecs.lucene49.Lucene49Codec;
import org.apache.lucene.codecs.lucene49.Lucene49DocValuesFormat;
@ -109,7 +108,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.getConfig().setRAMBufferSizeMB(1000d);
writer.updateBinaryDocValue(new Term("id", "doc-2"), "val", toBytes(7));
assertEquals(4, writer.getFlushDeletesCount());
writer.shutdown();
writer.close();
dir.close();
}
@ -129,11 +128,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader;
if (random().nextBoolean()) { // not NRT
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
} else { // NRT
reader = DirectoryReader.open(writer, true);
writer.shutdown();
writer.close();
}
assertEquals(1, reader.leaves().size());
@ -171,11 +170,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader;
if (random().nextBoolean()) { // not NRT
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
} else { // NRT
reader = DirectoryReader.open(writer, true);
writer.shutdown();
writer.close();
}
for (AtomicReaderContext context : reader.leaves()) {
@ -225,7 +224,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
assertEquals(1, getValue(bdv1, 0));
assertEquals(10, getValue(bdv2, 0));
writer.shutdown();
writer.close();
IOUtils.close(reader1, reader2, dir);
}
@ -254,11 +253,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader;
if (random().nextBoolean()) { // not NRT
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
} else { // NRT
reader = DirectoryReader.open(writer, true);
writer.shutdown();
writer.close();
}
AtomicReader slow = SlowCompositeReaderWrapper.wrap(reader);
@ -298,11 +297,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader;
if (random().nextBoolean()) { // not NRT
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
} else { // NRT
reader = DirectoryReader.open(writer, true);
writer.shutdown();
writer.close();
}
AtomicReader r = reader.leaves().get(0).reader();
@ -332,11 +331,11 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
final DirectoryReader reader;
if (random().nextBoolean()) { // not NRT
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
} else { // NRT
reader = DirectoryReader.open(writer, true);
writer.shutdown();
writer.close();
}
AtomicReader r = reader.leaves().get(0).reader();
@ -367,7 +366,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update all docs' bdv field
writer.updateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv", toBytes(17L));
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader r = reader.leaves().get(0).reader();
@ -413,7 +412,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update all docs' bdv1 field
writer.updateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv1", toBytes(17L));
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader r = reader.leaves().get(0).reader();
@ -446,7 +445,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update all docs' bdv field
writer.updateBinaryDocValue(new Term("dvUpdateKey", "dv"), "bdv", toBytes(17L));
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader r = reader.leaves().get(0).reader();
@ -487,7 +486,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// ok
}
writer.shutdown();
writer.close();
dir.close();
}
@ -511,7 +510,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc); // in-memory document
writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(17L));
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
@ -542,7 +541,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(17L)); // update existing field
writer.updateBinaryDocValue(new Term("key", "doc"), "bdv", toBytes(3L)); // update existing field 2nd time in this commit
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
@ -584,7 +583,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
if (random.nextDouble() < 0.4) {
writer.commit();
} else if (random.nextDouble() < 0.1) {
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random));
writer = new IndexWriter(dir, conf);
}
@ -621,7 +620,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
reader.close();
}
writer.shutdown();
writer.close();
dir.close();
}
@ -641,7 +640,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.updateBinaryDocValue(new Term("k1", "v1"), "bdv", toBytes(17L));
writer.updateBinaryDocValue(new Term("k2", "v2"), "bdv", toBytes(3L));
writer.shutdown();
writer.close();
final DirectoryReader reader = DirectoryReader.open(dir);
final AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
@ -739,7 +738,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// System.out.println();
}
writer.shutdown();
writer.close();
IOUtils.close(reader, dir);
}
@ -747,7 +746,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// (writer.close()), the segments might have merged and that update becomes
// legit.
conf.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter writer = new IndexWriter(dir, conf);
@ -778,7 +777,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update document in the second segment - field should be added and we should
// be able to handle the other document correctly (e.g. no NPE)
writer.updateBinaryDocValue(new Term("id", "doc1"), "bdv", toBytes(5L));
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
for (AtomicReaderContext context : reader.leaves()) {
@ -801,7 +800,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random()));
// prevent merges, otherwise by the time updates are applied
// (writer.shutdown()), the segments might have merged and that update becomes
// (writer.close()), the segments might have merged and that update becomes
// legit.
conf.setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter writer = new IndexWriter(dir, conf);
@ -823,7 +822,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update document in the second segment
writer.updateBinaryDocValue(new Term("id", "doc1"), "bdv", toBytes(5L));
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
for (AtomicReaderContext context : reader.leaves()) {
@ -851,7 +850,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
writer.updateBinaryDocValue(new Term("f", "mock-value"), "f", toBytes(17L));
writer.shutdown();
writer.close();
DirectoryReader r = DirectoryReader.open(dir);
BinaryDocValues bdv = r.leaves().get(0).reader().getBinaryDocValues("f");
@ -875,14 +874,14 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
doc.add(new StringField("id", "doc", Store.NO));
doc.add(new BinaryDocValuesField("f", toBytes(5L)));
writer.addDocument(doc);
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
writer.updateBinaryDocValue(new Term("id", "doc"), "f", toBytes(4L));
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
try {
writer.shutdown();
writer.close();
fail("should not have succeeded to update a segment written with an old Codec");
} catch (UnsupportedOperationException e) {
writer.rollback();
@ -996,7 +995,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
for (Thread t : threads) t.start();
done.await();
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
for (AtomicReaderContext context : reader.leaves()) {
@ -1054,7 +1053,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
}
reader.close();
}
writer.shutdown();
writer.close();
dir.close();
}
@ -1074,7 +1073,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
doc.add(new BinaryDocValuesField("f1", toBytes(5L)));
doc.add(new BinaryDocValuesField("f2", toBytes(13L)));
writer.addDocument(doc);
writer.shutdown();
writer.close();
// change format
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -1092,7 +1091,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
doc.add(new BinaryDocValuesField("f2", toBytes(2L)));
writer.addDocument(doc);
writer.updateBinaryDocValue(new Term("id", "d0"), "f1", toBytes(12L));
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader r = SlowCompositeReaderWrapper.wrap(reader);
@ -1135,7 +1134,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
long value = random().nextInt();
Term term = new Term("id", RandomPicks.randomFrom(random(), randomTerms));
writer.updateDocValues(term, new BinaryDocValuesField("bdv", toBytes(value)), new BinaryDocValuesField("control", toBytes(value * 2)));
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -1147,7 +1146,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.addIndexes(reader);
reader.close();
}
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir2);
for (AtomicReaderContext context : reader.leaves()) {
@ -1188,7 +1187,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
assertEquals(numFiles, dir.listAll().length);
}
writer.shutdown();
writer.close();
dir.close();
}
@ -1242,7 +1241,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
new BinaryDocValuesField("cf" + field, toBytes(value * 2)));
}
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
for (AtomicReaderContext context : reader.leaves()) {
@ -1276,7 +1275,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.updateBinaryDocValue(new Term("upd", "t2"), "f1", toBytes(3L)); // update f1 to 3
writer.updateBinaryDocValue(new Term("upd", "t2"), "f2", toBytes(3L)); // update f2 to 3
writer.updateBinaryDocValue(new Term("upd", "t1"), "f1", toBytes(4L)); // update f1 to 4 (but not f2)
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(4, getValue(reader.leaves().get(0).reader().getBinaryDocValues("f1"), 0));
@ -1300,7 +1299,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
writer.deleteDocuments(new Term("id", "doc")); // delete all docs in the first segment
writer.addDocument(doc);
writer.updateBinaryDocValue(new Term("id", "doc"), "f1", toBytes(2L));
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.leaves().size());
@ -1322,7 +1321,7 @@ public class TestBinaryDocValuesUpdates extends LuceneTestCase {
// update w/ multiple nonexisting terms in same field
writer.updateBinaryDocValue(new Term("c", "foo"), "f1", toBytes(2L));
writer.updateBinaryDocValue(new Term("c", "bar"), "f1", toBytes(2L));
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.leaves().size());
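A minimal sketch of the binary doc-values update call this file exercises (the toBytes helper above is the test's own; a plain BytesRef stands in for it here, and the setup is illustrative):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;

public class BdvUpdateSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    Document doc = new Document();
    doc.add(new StringField("id", "doc-1", Field.Store.NO));
    doc.add(new BinaryDocValuesField("val", new BytesRef("old")));
    writer.addDocument(doc);
    writer.commit();
    // Rewrites the doc-values bytes for every document matching the term,
    // without reindexing the document itself:
    writer.updateBinaryDocValue(new Term("id", "doc-1"), "val", new BytesRef("new"));
    writer.close(); // commits the update by default after this change
    dir.close();
  }
}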


@ -53,7 +53,7 @@ public class TestBinaryTerms extends LuceneTestCase {
}
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);


@ -20,7 +20,6 @@ package org.apache.lucene.index;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
@ -53,7 +52,7 @@ public class TestCheckIndex extends LuceneTestCase {
writer.forceMerge(1);
writer.commit();
writer.deleteDocuments(new Term("field","aaa5"));
writer.shutdown();
writer.close();
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
CheckIndex checker = new CheckIndex(dir);
@ -113,7 +112,7 @@ public class TestCheckIndex extends LuceneTestCase {
));
doc.add(field);
iw.addDocument(doc);
iw.shutdown();
iw.close();
dir.close(); // checkindex
}
}
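For context, a minimal sketch of the CheckIndex idiom this test drives (assumes the post-change API; the tiny index built here is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CheckIndexSketch {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    Document doc = new Document();
    doc.add(new TextField("field", "aaa", Field.Store.NO));
    writer.addDocument(doc);
    writer.close(); // commits by default after this change

    // Capture CheckIndex output instead of printing to stdout:
    ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
    CheckIndex checker = new CheckIndex(dir);
    checker.setInfoStream(new PrintStream(bos, false, "UTF-8"));
    CheckIndex.Status status = checker.checkIndex();
    System.out.println("clean=" + status.clean);
    dir.close();
  }
}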


@ -24,7 +24,6 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
public class TestCodecHoldsOpenFiles extends LuceneTestCase {
public void test() throws Exception {
@ -38,7 +37,7 @@ public class TestCodecHoldsOpenFiles extends LuceneTestCase {
}
IndexReader r = w.getReader();
w.shutdown();
w.close();
for(String fileName : d.listAll()) {
try {


@ -32,15 +32,12 @@ import org.apache.lucene.codecs.lucene41.Lucene41RWCodec;
import org.apache.lucene.codecs.lucene42.Lucene42RWCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
@ -838,7 +835,7 @@ public class TestCodecs extends LuceneTestCase {
doc.add(new StringField("f", "doc", Store.NO));
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
Term term = new Term("f", new BytesRef("doc"));
DirectoryReader reader = DirectoryReader.open(dir);
@ -867,7 +864,7 @@ public class TestCodecs extends LuceneTestCase {
OLD_FORMAT_IMPERSONATION_IS_ACTIVE = false;
try {
writer.shutdown();
writer.close();
fail("should not have succeeded to impersonate an old format!");
} catch (UnsupportedOperationException e) {
writer.rollback();


@ -820,7 +820,7 @@ public class TestCompoundFile extends LuceneTestCase
riw.commit();
}
}
riw.shutdown();
riw.close();
checkFiles(dir);
dir.close();
}


@ -123,7 +123,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
assertEquals(20*(i+1)+extraCount, writer.numDocs());
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(directory);
assertEquals(200+extraCount, reader.numDocs());
reader.close();
@ -167,7 +167,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
writer.commit();
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(directory);
// Verify that we did not lose any deletes...
assertEquals(450, reader.numDocs());
@ -191,7 +191,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
@ -199,7 +199,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
.setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
}
writer.shutdown();
writer.close();
directory.close();
}
@ -215,7 +215,8 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
newIndexWriterConfig(new MockAnalyzer(random())).
// Force excessive merging:
setMaxBufferedDocs(2).
setMergePolicy(newLogMergePolicy(100))
setMergePolicy(newLogMergePolicy(100)).
setCommitOnClose(false)
);
for(int iter=0;iter<10;iter++) {
@ -237,7 +238,11 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
writer.shutdown(false);
try {
writer.commit();
} finally {
writer.close();
}
IndexReader reader = DirectoryReader.open(directory);
assertEquals((1+iter)*182, reader.numDocs());
@ -253,7 +258,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
setMaxBufferedDocs(2)
);
}
writer.shutdown();
writer.close();
directory.close();
}
@ -261,7 +266,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
// LUCENE-4544
public void testMaxMergeCount() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())).setCommitOnClose(false);
final int maxMergeCount = TestUtil.nextInt(random(), 1, 5);
final int maxMergeThreads = TestUtil.nextInt(random(), 1, maxMergeCount);
@ -323,7 +328,11 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
w.addDocument(doc);
}
}
w.shutdown(false);
try {
w.commit();
} finally {
w.close();
}
dir.close();
}
@ -370,13 +379,13 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
}
atLeastOneMerge.await();
assertTrue(((TrackingCMS) w.getConfig().getMergeScheduler()).totMergedBytes != 0);
w.shutdown();
w.close();
d.close();
}
public void testLiveMaxMergeCount() throws Exception {
Directory d = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
TieredMergePolicy tmp = new TieredMergePolicy();
tmp.setSegmentsPerTier(1000);
tmp.setMaxMergeAtOnce(1000);


@ -46,7 +46,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer.addDocument(d1);
if (i == 1) {
writer.shutdown();
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
} else {
@ -62,7 +62,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d2.add(new TextField("f4", "fourth field", Field.Store.NO));
writer.addDocument(d2);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
@ -80,7 +80,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.forceMerge(1);
writer.shutdown();
writer.close();
sis = new SegmentInfos();
sis.read(dir);
@ -110,7 +110,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d1.add(new TextField("f2", "second field", Field.Store.YES));
writer.addDocument(d1);
writer.shutdown();
writer.close();
writer = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
@ -123,12 +123,12 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d2.add(new TextField("f4", "fourth field", Field.Store.YES));
writer.addDocument(d2);
writer.shutdown();
writer.close();
writer = new IndexWriter(dir1, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(NoMergePolicy.INSTANCE));
writer.addIndexes(dir2);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir1);
@ -160,7 +160,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new TextField("f1", "d1 first field", Field.Store.YES));
d.add(new TextField("f2", "d1 second field", Field.Store.YES));
writer.addDocument(d);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(1, sis.size());
@ -177,7 +177,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new TextField("f1", "d2 first field", Field.Store.YES));
d.add(new StoredField("f3", new byte[] { 1, 2, 3 }));
writer.addDocument(d);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(2, sis.size());
@ -198,7 +198,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
d.add(new TextField("f2", "d3 second field", Field.Store.YES));
d.add(new StoredField("f3", new byte[] { 1, 2, 3, 4, 5 }));
writer.addDocument(d);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
assertEquals(3, sis.size());
@ -222,14 +222,14 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
// nuke the first segment entirely so that the segment with gaps is
// loaded first!
writer.forceMergeDeletes();
writer.shutdown();
writer.close();
}
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(new LogByteSizeMergePolicy())
.setInfoStream(new FailOnNonBulkMergesInfoStream()));
writer.forceMerge(1);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
@ -267,7 +267,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
}
writer.forceMerge(1);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);


@ -31,14 +31,14 @@ import org.apache.lucene.util.LuceneTestCase;
public class TestCrash extends LuceneTestCase {
private IndexWriter initIndex(Random random, boolean initialCommit) throws IOException {
return initIndex(random, newMockDirectory(random), initialCommit);
return initIndex(random, newMockDirectory(random), initialCommit, true);
}
private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit) throws IOException {
private IndexWriter initIndex(Random random, MockDirectoryWrapper dir, boolean initialCommit, boolean commitOnClose) throws IOException {
dir.setLockFactory(NoLockFactory.getNoLockFactory());
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random))
.setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()));
.setMaxBufferedDocs(10).setMergeScheduler(new ConcurrentMergeScheduler()).setCommitOnClose(commitOnClose));
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
if (initialCommit) {
writer.commit();
@ -85,7 +85,7 @@ public class TestCrash extends LuceneTestCase {
Directory dir2 = newDirectory(dir);
dir.close();
new RandomIndexWriter(random(), dir2).shutdown();
new RandomIndexWriter(random(), dir2).close();
dir2.close();
}
@ -111,8 +111,8 @@ public class TestCrash extends LuceneTestCase {
System.out.println("TEST: now crash");
}
crash(writer);
writer = initIndex(random(), dir, false);
writer.shutdown();
writer = initIndex(random(), dir, false, true);
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertTrue(reader.numDocs() < 314);
@ -124,7 +124,7 @@ public class TestCrash extends LuceneTestCase {
Directory dir2 = newDirectory(dir);
dir.close();
new RandomIndexWriter(random(), dir2).shutdown();
new RandomIndexWriter(random(), dir2).close();
dir2.close();
}
@ -136,8 +136,8 @@ public class TestCrash extends LuceneTestCase {
// running when we crash:
dir.setAssertNoUnrefencedFilesOnClose(false);
writer.shutdown();
writer = initIndex(random(), dir, false);
writer.close();
writer = initIndex(random(), dir, false, true);
assertEquals(314, writer.maxDoc());
crash(writer);
@ -160,7 +160,7 @@ public class TestCrash extends LuceneTestCase {
Directory dir2 = newDirectory(dir);
dir.close();
new RandomIndexWriter(random(), dir2).shutdown();
new RandomIndexWriter(random(), dir2).close();
dir2.close();
}
@ -169,7 +169,7 @@ public class TestCrash extends LuceneTestCase {
IndexWriter writer = initIndex(random(), false);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
writer.shutdown();
writer.close();
dir.crash();
/*
@ -186,11 +186,15 @@ public class TestCrash extends LuceneTestCase {
}
public void testCrashAfterCloseNoWait() throws IOException {
IndexWriter writer = initIndex(random(), false);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
Random random = random();
MockDirectoryWrapper dir = newMockDirectory(random);
IndexWriter writer = initIndex(random, dir, false, false);
writer.shutdown(false);
try {
writer.commit();
} finally {
writer.close();
}
dir.crash();


@ -32,7 +32,6 @@ import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
public class TestCrashCausesCorruptIndex extends LuceneTestCase {
@ -81,7 +80,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// expected
}
// writes segments_3
indexWriter.shutdown();
indexWriter.close();
assertFalse(slowFileExists(realDirectory, "segments_2"));
crashAfterCreateOutput.close();
}
@ -101,7 +100,7 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
// currently the test fails above.
// however, to test the fix, the following lines should pass as well.
indexWriter.addDocument(getDocument());
indexWriter.shutdown();
indexWriter.close();
assertFalse(slowFileExists(realDirectory, "segments_2"));
realDirectory.close();
}


@ -65,7 +65,7 @@ public class TestCustomNorms extends LuceneTestCase {
}
}
writer.commit();
writer.shutdown();
writer.close();
AtomicReader open = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(dir));
NumericDocValues norms = open.getNormValues(floatTestField);
assertNotNull(norms);


@ -231,7 +231,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
writer.setCommitData(commitData);
writer.commit();
writer.shutdown();
writer.close();
long lastDeleteTime = 0;
final int targetNumDelete = TestUtil.nextInt(random(), 1, 5);
@ -253,7 +253,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
commitData.put("commitTime", String.valueOf(System.currentTimeMillis()));
writer.setCommitData(commitData);
writer.commit();
writer.shutdown();
writer.close();
Thread.sleep((int) (1000.0*(SECONDS/5.0)));
}
@ -326,7 +326,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.shutdown();
writer.close();
final boolean needsMerging;
{
@ -346,7 +346,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer = new IndexWriter(dir, conf);
policy = (KeepAllDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
writer.forceMerge(1);
writer.shutdown();
writer.close();
}
assertEquals(needsMerging ? 2:1, policy.numOnInit);
@ -385,7 +385,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setIndexDeletionPolicy(policy));
writer.shutdown();
writer.close();
int postCount = dir.listAll().length;
assertTrue(postCount < preCount);
}
@ -414,7 +414,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
if ((1+i)%2 == 0)
writer.commit();
}
writer.shutdown();
writer.close();
Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
assertEquals(5, commits.size());
@ -431,7 +431,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.forceMerge(1);
writer.shutdown();
writer.close();
assertEquals(6, DirectoryReader.listCommits(dir).size());
@ -455,7 +455,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
.setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
writer.shutdown();
writer.close();
// Now 8 because we made another commit
assertEquals(7, DirectoryReader.listCommits(dir).size());
@ -471,7 +471,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy));
writer.forceMerge(1);
writer.shutdown();
writer.close();
r = DirectoryReader.open(dir);
assertEquals(1, r.leaves().size());
@ -491,7 +491,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(10, r.numDocs());
r.close();
writer.shutdown();
writer.close();
// Now reader sees not-fully-merged index:
r = DirectoryReader.open(dir);
@ -525,7 +525,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
@ -535,7 +535,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer = new IndexWriter(dir, conf);
policy = (KeepNoneOnInitDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
writer.forceMerge(1);
writer.shutdown();
writer.close();
assertEquals(2, policy.numOnInit);
// If we are not auto committing then there should
@ -577,7 +577,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
addDoc(writer);
}
writer.forceMerge(1);
writer.shutdown();
writer.close();
}
assertTrue(policy.numDelete > 0);
@ -631,7 +631,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0);
IndexWriter writer = new IndexWriter(dir, conf);
KeepLastNDeletionPolicy policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
writer.shutdown();
writer.close();
Term searchTerm = new Term("content", "aaa");
Query query = new TermQuery(searchTerm);
@ -649,15 +649,15 @@ public class TestDeletionPolicy extends LuceneTestCase {
addDocWithID(writer, i*(N+1)+j);
}
// this is a commit
writer.shutdown();
conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
writer.close();
conf = new IndexWriterConfig(new MockAnalyzer(random()))
.setIndexDeletionPolicy(policy)
.setMergePolicy(NoMergePolicy.INSTANCE);
writer = new IndexWriter(dir, conf);
policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
writer.deleteDocuments(new Term("id", "" + (i*(N+1)+3)));
// this is a commit
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
@ -670,7 +670,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy();
// This will not commit: there are no changes
// pending because we opened for "create":
writer.shutdown();
writer.close();
}
assertEquals(3*(N+1)+1, policy.numOnInit);


@ -129,14 +129,14 @@ public class TestDirectoryReader extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("body", s, Field.Store.NO));
iw.addDocument(doc);
iw.shutdown();
iw.close();
}
public void testIsCurrent() throws Exception {
Directory d = newDirectory();
IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())));
addDocumentWithFields(writer);
writer.shutdown();
writer.close();
// set up reader:
DirectoryReader reader = DirectoryReader.open(d);
assertTrue(reader.isCurrent());
@ -144,13 +144,13 @@ public class TestDirectoryReader extends LuceneTestCase {
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
addDocumentWithFields(writer);
writer.shutdown();
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.shutdown();
writer.close();
assertFalse(reader.isCurrent());
reader.close();
d.close();
@ -179,7 +179,7 @@ public class TestDirectoryReader extends LuceneTestCase {
doc.add(new TextField("unstored","test1", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
// set up reader
DirectoryReader reader = DirectoryReader.open(d);
FieldInfos fieldInfos = MultiFields.getMergedFieldInfos(reader);
@ -239,7 +239,7 @@ public class TestDirectoryReader extends LuceneTestCase {
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
// verify fields again
reader = DirectoryReader.open(d);
@ -337,7 +337,7 @@ public void testTermVectors() throws Exception {
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
d.close();
}
@ -375,7 +375,7 @@ void assertTermDocsCount(String msg,
addDocumentWithDifferentFields(writer);
addDocumentWithTermVectorFields(writer);
}
writer.shutdown();
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
@ -383,7 +383,7 @@ void assertTermDocsCount(String msg,
doc.add(new StoredField("bin1", bin));
doc.add(new TextField("junk", "junk text", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
StoredDocument doc2 = reader.document(reader.maxDoc() - 1);
StorableField[] fields = doc2.getFields("bin1");
@ -404,7 +404,7 @@ void assertTermDocsCount(String msg,
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
doc2 = reader.document(reader.maxDoc() - 1);
fields = doc2.getFields("bin1");
@ -442,7 +442,7 @@ public void testFilesOpenClose() throws IOException {
Directory dir = newFSDirectory(dirFile);
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
addDoc(writer, "test");
writer.shutdown();
writer.close();
dir.close();
// Try to erase the data - this ensures that the writer closed all files
@ -453,7 +453,7 @@ public void testFilesOpenClose() throws IOException {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
addDoc(writer, "test");
writer.shutdown();
writer.close();
dir.close();
// Now open existing directory and test that reader closes all files
@ -657,7 +657,7 @@ public void testFilesOpenClose() throws IOException {
);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(d);
@ -678,7 +678,7 @@ public void testFilesOpenClose() throws IOException {
);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
writer.shutdown();
writer.close();
DirectoryReader r2 = DirectoryReader.openIfChanged(r);
assertNotNull(r2);
@ -689,7 +689,7 @@ public void testFilesOpenClose() throws IOException {
writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
writer.close();
r2 = DirectoryReader.openIfChanged(r);
assertNotNull(r2);
@ -737,7 +737,7 @@ public void testFilesOpenClose() throws IOException {
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("a"));
writer.shutdown();
writer.close();
Collection<IndexCommit> commits = DirectoryReader.listCommits(dir);
for (final IndexCommit commit : commits) {
@ -778,7 +778,7 @@ public void testFilesOpenClose() throws IOException {
assertEquals(10, s.reader().terms("number").size());
}
r2.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -798,7 +798,7 @@ public void testFilesOpenClose() throws IOException {
assertNull(r2);
writer.commit();
assertFalse(r.isCurrent());
writer.shutdown();
writer.close();
r.close();
dir.close();
}
@ -818,7 +818,7 @@ public void testFilesOpenClose() throws IOException {
writer.addDocument(new Document());
writer.commit();
sdp.snapshot();
writer.shutdown();
writer.close();
long currentGen = 0;
for (IndexCommit ic : DirectoryReader.listCommits(dir)) {
assertTrue("currentGen=" + currentGen + " commitGen=" + ic.getGeneration(), currentGen < ic.getGeneration());
@ -836,7 +836,7 @@ public void testFilesOpenClose() throws IOException {
d.add(newTextField("f", "a a b", Field.Store.NO));
writer.addDocument(d);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
try {
// Make sure codec impls totalTermFreq (eg PreFlex doesn't)
Assume.assumeTrue(r.totalTermFreq(new Term("f", new BytesRef("b"))) != -1);
@ -859,7 +859,7 @@ public void testFilesOpenClose() throws IOException {
d.add(newTextField("f", "b", Field.Store.NO));
writer.addDocument(d);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
try {
// Make sure codec impls getSumDocFreq (eg PreFlex doesn't)
Assume.assumeTrue(r.getSumDocFreq("f") != -1);
@ -880,7 +880,7 @@ public void testFilesOpenClose() throws IOException {
d.add(newTextField("f", "a", Field.Store.NO));
writer.addDocument(d);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
try {
// Make sure codec impls getSumDocFreq (eg PreFlex doesn't)
Assume.assumeTrue(r.getDocCount("f") != -1);
@ -901,7 +901,7 @@ public void testFilesOpenClose() throws IOException {
d.add(newTextField("f", "a a b", Field.Store.NO));
writer.addDocument(d);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
try {
// Make sure codec impls getSumDocFreq (eg PreFlex doesn't)
Assume.assumeTrue(r.getSumTotalTermFreq("f") != -1);
@ -937,7 +937,7 @@ public void testFilesOpenClose() throws IOException {
// Close the top reader, it's the only one that should be closed
assertEquals(1, closeCount[0]);
writer.shutdown();
writer.close();
DirectoryReader reader2 = DirectoryReader.open(dir);
reader2.addReaderClosedListener(listener);
@ -953,7 +953,7 @@ public void testFilesOpenClose() throws IOException {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
r.document(0);
try {
r.document(1);
@ -975,7 +975,7 @@ public void testFilesOpenClose() throws IOException {
r.decRef();
r.close();
assertFalse(r.tryIncRef());
writer.shutdown();
writer.close();
dir.close();
}
@ -1003,7 +1003,7 @@ public void testFilesOpenClose() throws IOException {
assertNull(threads[i].failed);
}
assertFalse(r.tryIncRef());
writer.shutdown();
writer.close();
dir.close();
}
@ -1039,7 +1039,7 @@ public void testFilesOpenClose() throws IOException {
doc.add(newStringField("field2", "foobaz", Field.Store.YES));
writer.addDocument(doc);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
Set<String> fieldsToLoad = new HashSet<>();
assertEquals(0, r.document(0, fieldsToLoad).getFields().size());
fieldsToLoad.add("field1");

View File

@ -148,7 +148,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
}
} finally {
iwriter.shutdown();
iwriter.close();
reader.close();
}
}
@ -208,15 +208,14 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
writer.addDocument(createDocument(i, 3));
}
writer.forceMerge(1);
writer.shutdown();
writer.close();
final TestReopen test = new TestReopen() {
@Override
protected void modifyIndex(int i) throws IOException {
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
modifier.addDocument(createDocument(n + i, 6));
modifier.shutdown();
modifier.close();
}
@Override
@ -434,8 +433,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public static void createIndex(Random random, Directory dir, boolean multiSegment) throws IOException {
IndexWriter.unlock(dir);
IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random))
IndexWriter w = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(random, new MockAnalyzer(random))
.setMergePolicy(new LogDocMergePolicy()));
for (int i = 0; i < 100; i++) {
@ -449,7 +447,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
w.forceMerge(1);
}
w.shutdown();
w.close();
DirectoryReader r = DirectoryReader.open(dir);
if (multiSegment) {
@ -487,31 +485,31 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: modify index");
}
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
w.deleteDocuments(new Term("field2", "a11"));
w.deleteDocuments(new Term("field2", "b30"));
w.shutdown();
w.close();
break;
}
case 1: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
w.forceMerge(1);
w.shutdown();
w.close();
break;
}
case 2: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
w.addDocument(createDocument(101, 4));
w.forceMerge(1);
w.addDocument(createDocument(102, 4));
w.addDocument(createDocument(103, 4));
w.shutdown();
w.close();
break;
}
case 3: {
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
w.addDocument(createDocument(101, 4));
w.shutdown();
w.close();
break;
}
}
@ -569,7 +567,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
writer.setCommitData(data);
writer.commit();
}
writer.shutdown();
writer.close();
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(0, r.numDocs());
@ -619,7 +617,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
assertNotNull(r2);
r.close();
assertEquals(1, r2.numDocs());
w.shutdown();
w.close();
r2.close();
dir.close();
}
@ -627,7 +625,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public void testOverDecRefDuringReopen() throws Exception {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setCodec(Codec.forName("Lucene49"));
IndexWriter w = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -696,7 +694,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public void testNPEAfterInvalidReindex1() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("id", "id", Field.Store.NO));
w.addDocument(doc);
@ -715,7 +713,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
dir.deleteFile(fileName);
}
w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(newStringField("id", "id", Field.Store.NO));
doc.add(new NumericDocValuesField("ndv", 13));
@ -746,7 +744,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
public void testNPEAfterInvalidReindex2() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("id", "id", Field.Store.NO));
w.addDocument(doc);
@ -765,7 +763,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
dir.deleteFile(fileName);
}
w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
doc = new Document();
doc.add(newStringField("id", "id", Field.Store.NO));
doc.add(new NumericDocValuesField("ndv", 13));
@ -785,7 +783,6 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
}
r.close();
w.close();
dir.close();
}
}
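
The other mechanical change, visible in the hunks above: IndexWriterConfig no longer takes a Version as its first argument. A sketch of the new construction, with the analyzer choice illustrative; the old form is kept only as a comment for contrast:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;

public class ConfigConstructionSketch {
  // Old form, no longer compiles on trunk:
  //   new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
  static IndexWriterConfig newConfig(Analyzer analyzer) {
    return new IndexWriterConfig(analyzer)       // Version argument removed
        .setMergePolicy(NoMergePolicy.INSTANCE); // setters still chain
  }

  public static void main(String[] args) {
    System.out.println(newConfig(new StandardAnalyzer()));
  }
}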

View File

@ -134,7 +134,7 @@ public class TestDoc extends LuceneTestCase {
SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
printSegment(out, si2);
writer.shutdown();
writer.close();
SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
printSegment(out, siMerge);
@ -176,7 +176,7 @@ public class TestDoc extends LuceneTestCase {
si2 = indexDoc(writer, "test2.txt");
printSegment(out, si2);
writer.shutdown();
writer.close();
siMerge = merge(directory, si1, si2, "_merge", true);
printSegment(out, siMerge);

View File

@ -43,7 +43,7 @@ public class TestDocCount extends LuceneTestCase {
ir = iw.getReader();
verifyCount(ir);
ir.close();
iw.shutdown();
iw.close();
dir.close();
}

View File

@ -18,7 +18,6 @@ package org.apache.lucene.index;
*/
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.Tokenizer;
@ -69,7 +68,7 @@ public class TestDocInverterPerFieldErrorInfo extends LuceneTestCase {
public void testInfoStreamGetsFieldName() throws Exception {
Directory dir = newDirectory();
IndexWriter writer;
IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
IndexWriterConfig c = new IndexWriterConfig(new ThrowingAnalyzer());
final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
@ -86,7 +85,7 @@ public class TestDocInverterPerFieldErrorInfo extends LuceneTestCase {
assertTrue(infoStream.contains("distinctiveFieldName"));
}
writer.shutdown();
writer.close();
dir.close();
}
@ -94,7 +93,7 @@ public class TestDocInverterPerFieldErrorInfo extends LuceneTestCase {
public void testNoExtraNoise() throws Exception {
Directory dir = newDirectory();
IndexWriter writer;
IndexWriterConfig c = new IndexWriterConfig(TEST_VERSION_CURRENT, new ThrowingAnalyzer());
IndexWriterConfig c = new IndexWriterConfig(new ThrowingAnalyzer());
final ByteArrayOutputStream infoBytes = new ByteArrayOutputStream();
PrintStream infoPrintStream = new PrintStream(infoBytes, true, IOUtils.UTF_8);
PrintStreamInfoStream printStreamInfoStream = new PrintStreamInfoStream(infoPrintStream);
@ -111,7 +110,7 @@ public class TestDocInverterPerFieldErrorInfo extends LuceneTestCase {
String infoStream = new String(infoBytes.toByteArray(), IOUtils.UTF_8);
assertFalse(infoStream.contains("boringFieldName"));
writer.shutdown();
writer.close();
dir.close();
}

View File

@ -56,7 +56,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc.add(new NumericDocValuesField("dv", 1));
w.addDocument(doc);
IndexReader r1 = w.getReader();
w.shutdown();
w.close();
Directory d2 = newDirectory();
w = new RandomIndexWriter(random(), d2);
@ -65,7 +65,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc.add(new NumericDocValuesField("dv", 2));
w.addDocument(doc);
IndexReader r2 = w.getReader();
w.shutdown();
w.close();
Directory d3 = newDirectory();
w = new RandomIndexWriter(random(), d3);
@ -77,7 +77,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
w.forceMerge(1);
DirectoryReader r3 = w.getReader();
w.shutdown();
w.close();
AtomicReader sr = getOnlySegmentReader(r3);
assertEquals(2, sr.numDocs());
NumericDocValues docValues = sr.getNumericDocValues("dv");
@ -107,7 +107,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
DirectoryReader r = w.getReader();
w.shutdown();
w.close();
assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0));
r.close();
d.close();
@ -131,7 +131,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
}
DirectoryReader r = w.getReader();
w.shutdown();
w.close();
assertEquals(17, DocValues.getNumeric(getOnlySegmentReader(r), "field").get(0));
r.close();
d.close();
@ -156,14 +156,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
DirectoryReader r = w.getReader();
assertEquals(17, getOnlySegmentReader(r).getNumericDocValues("field").get(0));
r.close();
w.shutdown();
w.close();
d.close();
}
// LUCENE-3870
public void testLengthPrefixAcrossTwoPages() throws Exception {
Directory d = newDirectory();
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
byte[] bytes = new byte[32764];
BytesRef b = new BytesRef();
@ -187,7 +187,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
bytes[0] = 1;
assertEquals(b, bytes1);
r.close();
w.shutdown();
w.close();
d.close();
}
@ -216,7 +216,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
assertEquals(Integer.toString(i), d.get("docId"));
}
slow.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -237,7 +237,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = w.getReader();
assertEquals(1, ir.numDocs());
ir.close();
w.shutdown();
w.close();
dir.close();
}
@ -259,7 +259,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = w.getReader();
assertEquals(1, ir.numDocs());
ir.close();
w.shutdown();
w.close();
dir.close();
}
@ -289,7 +289,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = iwriter.getReader();
assertEquals(1, ir.numDocs());
ir.close();
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -317,7 +317,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
assertEquals(1, ir.numDocs());
ir.close();
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -343,7 +343,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = iwriter.getReader();
assertEquals(1, ir.numDocs());
ir.close();
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -373,7 +373,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = iwriter.getReader();
assertEquals(1, ir.numDocs());
ir.close();
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -404,7 +404,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = iwriter.getReader();
assertEquals(1, ir.numDocs());
ir.close();
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -424,7 +424,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
w.shutdown();
w.close();
dir.close();
}
@ -440,7 +440,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
w.addDocument(doc);
w.shutdown();
w.close();
dir.close();
}
@ -451,7 +451,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("foo", 0));
w.addDocument(doc);
w.shutdown();
w.close();
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
@ -459,7 +459,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
w.addDocument(doc);
w.shutdown();
w.close();
dir.close();
}
@ -507,7 +507,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
t.join();
}
assertTrue(hitExc.get());
w.shutdown();
w.close();
dir.close();
}
@ -525,7 +525,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("foo", new BytesRef("hello")));
w2.addDocument(doc);
w2.shutdown();
w2.close();
try {
w.addIndexes(new Directory[] {dir2});
@ -542,7 +542,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
r.close();
dir2.close();
w.shutdown();
w.close();
dir.close();
}
@ -564,7 +564,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
IndexReader ir = writer.getReader();
assertEquals(1, ir.numDocs());
ir.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -575,7 +575,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
@ -587,7 +587,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
writer.shutdown();
writer.close();
dir.close();
}
@ -598,7 +598,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
writer = new IndexWriter(dir, conf);
@ -606,7 +606,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
writer.addDocument(doc);
writer.shutdown();
writer.close();
dir.close();
}
@ -621,7 +621,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
writer.addDocument(doc);
writer.shutdown();
writer.close();
dir.close();
}
@ -637,7 +637,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
writer.addDocument(doc);
writer.shutdown();
writer.close();
dir.close();
}
@ -648,14 +648,14 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
writer = new IndexWriter(dir, conf);
doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
writer.addDocument(doc);
writer.shutdown();
writer.close();
dir.close();
}
@ -666,7 +666,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -680,7 +680,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
writer.shutdown();
writer.close();
dir.close();
dir2.close();
@ -693,7 +693,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -709,7 +709,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
// expected
}
readers[0].close();
writer.shutdown();
writer.close();
dir.close();
dir2.close();
@ -722,7 +722,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -736,7 +736,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
writer.shutdown();
writer.close();
dir2.close();
dir.close();
}
@ -748,7 +748,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
Document doc = new Document();
doc.add(new NumericDocValuesField("dv", 0L));
writer.addDocument(doc);
writer.shutdown();
writer.close();
Directory dir2 = newDirectory();
conf = newIndexWriterConfig(new MockAnalyzer(random()));
@ -764,7 +764,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
} catch (IllegalArgumentException iae) {
// expected
}
writer.shutdown();
writer.close();
dir2.close();
dir.close();
}
@ -783,7 +783,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.addDocument(doc);
DirectoryReader r = writer.getReader();
writer.shutdown();
writer.close();
AtomicReader subR = r.leaves().get(0).reader();
assertEquals(2, subR.numDocs());

View File

@ -58,7 +58,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
writer.addDocument(doc);
}
IndexReader reader = writer.getReader();
writer.shutdown();
writer.close();
int num = atLeast(13);
for (int i = 0; i < num; i++) {
@ -141,7 +141,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
}
IndexReader reader = writer.getReader();
writer.shutdown();
writer.close();
int num = atLeast(13);
for (int i = 0; i < num; i++) {
@ -218,7 +218,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
}
IndexReader reader = writer.getReader();
writer.shutdown();
writer.close();
int num = atLeast(13);
for (int i = 0; i < num; i++) {
@ -295,7 +295,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
// now do searches
IndexReader reader = writer.getReader();
writer.shutdown();
writer.close();
int num = atLeast(13);
for (int i = 0; i < num; i++) {
@ -348,7 +348,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
writer.shutdown();
writer.close();
r.close();
dir.close();
}
@ -373,7 +373,7 @@ public class TestDocsAndPositions extends LuceneTestCase {
docid = disi.docID();
assertEquals(-1, docid);
assertTrue(disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
writer.shutdown();
writer.close();
r.close();
dir.close();
}

View File

@ -26,7 +26,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.apache.lucene.search.DocIdSetIterator;
@ -62,7 +61,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.addDocument(testDoc);
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.shutdown();
writer.close();
//After adding the document, we should be able to read it back in
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
assertTrue(reader != null);
@ -124,7 +123,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.shutdown();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
@ -196,7 +195,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.shutdown();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, reader.getLiveDocs(), "f1", new BytesRef("a"));
@ -238,7 +237,7 @@ public class TestDocumentWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
SegmentCommitInfo info = writer.newestSegment();
writer.shutdown();
writer.close();
SegmentReader reader = new SegmentReader(info, newIOContext(random()));
DocsAndPositionsEnum termPositions = reader.termPositionsEnum(new Term("preanalyzed", "term1"));
@ -283,7 +282,7 @@ public class TestDocumentWriter extends LuceneTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(doc);
writer.forceMerge(1); // be sure to have a single segment
writer.shutdown();
writer.close();
TestUtil.checkIndex(dir);

View File

@ -90,9 +90,9 @@ public class TestDuelingCodecs extends LuceneTestCase {
createRandomIndex(numdocs, rightWriter, seed);
leftReader = maybeWrapReader(leftWriter.getReader());
leftWriter.shutdown();
leftWriter.close();
rightReader = maybeWrapReader(rightWriter.getReader());
rightWriter.shutdown();
rightWriter.close();
// check that our readers are valid
TestUtil.checkReader(leftReader);

View File

@ -55,9 +55,7 @@ public class TestExceedMaxTermLength extends LuceneTestCase {
public void test() throws Exception {
IndexWriter w = new IndexWriter
(dir, newIndexWriterConfig(random(),
TEST_VERSION_CURRENT,
new MockAnalyzer(random())));
(dir, newIndexWriterConfig(random(), new MockAnalyzer(random())));
try {
final FieldType ft = new FieldType();
ft.setIndexed(true);
@ -101,7 +99,7 @@ public class TestExceedMaxTermLength extends LuceneTestCase {
msg.contains("bytes can be at most") && msg.contains("in length; got"));
}
} finally {
w.shutdown();
w.close();
}
}
}
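
The Version removal also reaches the LuceneTestCase helpers used by these tests: newIndexWriterConfig(random, analyzer) replaces newIndexWriterConfig(random, TEST_VERSION_CURRENT, analyzer). A sketch assuming the lucene test-framework from this trunk on the classpath; the test class and method names are illustrative:

import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.LuceneTestCase;

public class HelperSignatureSketch extends LuceneTestCase {
  public void testHelperNoLongerTakesVersion() {
    // Randomized variant, as used by TestExceedMaxTermLength above:
    IndexWriterConfig a = newIndexWriterConfig(random(), new MockAnalyzer(random()));
    // Non-randomized variant:
    IndexWriterConfig b = newIndexWriterConfig(new MockAnalyzer(random()));
    assertNotNull(a);
    assertNotNull(b);
  }
}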

View File

@ -128,7 +128,7 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
public void testIndexWriterActuallyReuses() throws IOException {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig iwc = new IndexWriterConfig(null);
IndexWriter iw = new IndexWriter(dir, iwc);
final MyField field1 = new MyField();
iw.addDocument(new IndexDocument() {
@ -156,7 +156,7 @@ public class TestFieldReuse extends BaseTokenStreamTestCase {
}
});
assertSame(previous, field2.lastSeen);
iw.shutdown();
iw.close();
dir.close();
}

View File

@ -57,7 +57,7 @@ public class TestFieldsReader extends LuceneTestCase {
conf.getMergePolicy().setNoCFSRatio(0.0);
IndexWriter writer = new IndexWriter(dir, conf);
writer.addDocument(testDoc);
writer.shutdown();
writer.close();
FaultyIndexInput.doFail = false;
}
@ -204,7 +204,7 @@ public class TestFieldsReader extends LuceneTestCase {
for(int i=0;i<2;i++)
writer.addDocument(testDoc);
writer.forceMerge(1);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);

View File

@ -130,7 +130,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
d3.add(newTextField("default", "two four", Field.Store.YES));
writer.addDocument(d3);
writer.shutdown();
writer.close();
Directory target = newDirectory();
@ -140,7 +140,7 @@ public class TestFilterAtomicReader extends LuceneTestCase {
writer = new IndexWriter(target, newIndexWriterConfig(new MockAnalyzer(random())));
IndexReader reader = new TestReader(DirectoryReader.open(directory));
writer.addIndexes(reader);
writer.shutdown();
writer.close();
reader.close();
reader = DirectoryReader.open(target);

View File

@ -33,7 +33,7 @@ public class TestFlex extends LuceneTestCase {
IndexWriter w = new IndexWriter(
d,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).
new IndexWriterConfig(new MockAnalyzer(random())).
setMaxBufferedDocs(7).setMergePolicy(newLogMergePolicy())
);
@ -58,7 +58,7 @@ public class TestFlex extends LuceneTestCase {
r.close();
}
w.shutdown();
w.close();
d.close();
}
@ -79,7 +79,7 @@ public class TestFlex extends LuceneTestCase {
// ok -- codec is not required to support this op
}
r.close();
w.shutdown();
w.close();
d.close();
}
}

View File

@ -111,7 +111,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
if (ensureNotStalled) {
assertFalse(docsWriter.flushControl.stallControl.wasStalled());
}
writer.shutdown();
writer.close();
assertEquals(0, flushControl.activeBytes());
dir.close();
}
@ -162,7 +162,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
assertTrue("peak bytes without flush exceeded watermark",
flushPolicy.peakDocCountWithoutFlush <= iwc.getMaxBufferedDocs());
assertActiveBytesAfter(flushControl);
writer.shutdown();
writer.close();
assertEquals(0, flushControl.activeBytes());
dir.close();
}
@ -224,7 +224,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
assertFalse("never block if we don't flush on RAM", docsWriter.flushControl.stallControl.hasBlocked());
}
r.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -274,7 +274,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
assertTrue(docsWriter.flushControl.stallControl.wasStalled());
}
assertActiveBytesAfter(flushControl);
writer.shutdown();
writer.close();
dir.close();
}
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TestUtil;
public class TestForTooMuchCloning extends LuceneTestCase {
@ -57,7 +56,7 @@ public class TestForTooMuchCloning extends LuceneTestCase {
w.addDocument(doc);
}
final IndexReader r = w.getReader();
w.shutdown();
w.close();
final int cloneCount = dir.getInputCloneCount();
//System.out.println("merge clone count=" + cloneCount);

View File

@ -74,7 +74,7 @@ public class TestForceMergeForever extends LuceneTestCase {
((LogMergePolicy) mp).setMergeFactor(mergeAtOnce);
} else {
// skip test
w.shutdown();
w.close();
d.close();
return;
}
@ -101,7 +101,7 @@ public class TestForceMergeForever extends LuceneTestCase {
doStop.set(true);
t.join();
assertTrue("merge count is " + w.mergeCount.get(), w.mergeCount.get() <= 1);
w.shutdown();
w.close();
d.close();
docs.close();
}

View File

@ -66,7 +66,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
for(;i<45;i++) {
addDoc(writer, i);
}
writer.shutdown();
writer.close();
// Delete one doc so we get a .del file:
writer = new IndexWriter(
@ -77,7 +77,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
);
Term searchTerm = new Term("id", "7");
writer.deleteDocuments(searchTerm);
writer.shutdown();
writer.close();
// Now, artificially create an extra .del file & extra
// .s0 file:
@ -127,7 +127,7 @@ public class TestIndexFileDeleter extends LuceneTestCase {
// files and nothing more:
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.shutdown();
writer.close();
String[] files2 = dir.listAll();
dir.close();

View File

@ -38,9 +38,9 @@ public class TestIndexReaderClose extends LuceneTestCase {
final int iters = 1000 + 1 + random().nextInt(20);
for (int j = 0; j < iters; j++) {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(random(), new MockAnalyzer(random())));
writer.commit();
writer.shutdown();
writer.close();
DirectoryReader open = DirectoryReader.open(dir);
final boolean throwOnClose = !rarely();
AtomicReader wrap = SlowCompositeReaderWrapper.wrap(open);

View File

@ -83,11 +83,9 @@ import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.SetOnce;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.Version;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.packed.PackedInts;
import org.junit.Test;
public class TestIndexWriter extends LuceneTestCase {
@ -114,7 +112,7 @@ public class TestIndexWriter extends LuceneTestCase {
addDocWithIndex(writer,i);
}
assertEquals(100, writer.maxDoc());
writer.shutdown();
writer.close();
// delete 40 documents
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
@ -122,7 +120,7 @@ public class TestIndexWriter extends LuceneTestCase {
for (i = 0; i < 40; i++) {
writer.deleteDocuments(new Term("id", ""+i));
}
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
assertEquals(60, reader.numDocs());
@ -134,7 +132,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.forceMerge(1);
assertEquals(60, writer.maxDoc());
assertEquals(60, writer.numDocs());
writer.shutdown();
writer.close();
// check that the index reader gives the same numbers.
reader = DirectoryReader.open(dir);
@ -148,7 +146,7 @@ public class TestIndexWriter extends LuceneTestCase {
.setOpenMode(OpenMode.CREATE));
assertEquals(0, writer.maxDoc());
assertEquals(0, writer.numDocs());
writer.shutdown();
writer.close();
dir.close();
}
@ -171,7 +169,7 @@ public class TestIndexWriter extends LuceneTestCase {
public static void assertNoUnreferencedFiles(Directory dir, String message) throws IOException {
String[] startFiles = dir.listAll();
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))).rollback();
new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random()))).rollback();
String[] endFiles = dir.listAll();
Arrays.sort(startFiles);
@ -202,7 +200,7 @@ public class TestIndexWriter extends LuceneTestCase {
// add one document & close writer
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
addDoc(writer);
writer.shutdown();
writer.close();
// now open reader:
IndexReader reader = DirectoryReader.open(dir);
@ -213,7 +211,7 @@ public class TestIndexWriter extends LuceneTestCase {
.setOpenMode(OpenMode.CREATE));
assertEquals("should be zero documents", writer.maxDoc(), 0);
addDoc(writer);
writer.shutdown();
writer.close();
assertEquals("should be one document", reader.numDocs(), 1);
IndexReader reader2 = DirectoryReader.open(dir);
@ -233,7 +231,7 @@ public class TestIndexWriter extends LuceneTestCase {
addDoc(writer);
// close
writer.shutdown();
writer.close();
try {
addDoc(writer);
fail("did not hit AlreadyClosedException");
@ -249,7 +247,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.commit();
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(0, reader.maxDoc());
@ -259,7 +257,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.commit();
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
assertEquals(0, reader.maxDoc());
@ -282,7 +280,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(newField("f"+j, "aaa", storedTextType));
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(100, reader.maxDoc());
@ -318,7 +316,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertTrue(numFile > lastNumFile);
lastNumFile = numFile;
}
writer.shutdown();
writer.close();
dir.close();
}
@ -374,7 +372,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertTrue(flushCount > lastFlushCount);
}
}
writer.shutdown();
writer.close();
dir.close();
}
@ -435,7 +433,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertTrue(flushCount > lastFlushCount);
}
}
writer.shutdown();
writer.close();
dir.close();
}
@ -478,7 +476,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.addDocument(doc);
}
}
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
@ -508,7 +506,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(f);
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
Term searchTerm = new Term("field", "aaa");
@ -533,7 +531,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(f);
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@ -565,7 +563,7 @@ public class TestIndexWriter extends LuceneTestCase {
customType.setStoreTermVectorOffsets(true);
doc.add(newField("field", b.toString(), customType));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.maxDoc());
@ -609,7 +607,7 @@ public class TestIndexWriter extends LuceneTestCase {
for (int i = 0; i < 100; i++) {
addDoc(writer);
}
writer.shutdown();
writer.close();
Term searchTerm = new Term("content", "aaa");
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
@ -619,7 +617,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.CREATE));
writer.shutdown();
writer.close();
dir.close();
}
@ -640,7 +638,7 @@ public class TestIndexWriter extends LuceneTestCase {
for(int i=0;i<19;i++)
writer.addDocument(doc);
writer.flush(false, true);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
// Since we flushed w/o allowing merging we should now
@ -666,7 +664,7 @@ public class TestIndexWriter extends LuceneTestCase {
System.out.println("\nTEST: now add empty doc");
}
writer.addDocument(new Document());
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(2, reader.numDocs());
reader.close();
@ -689,7 +687,7 @@ public class TestIndexWriter extends LuceneTestCase {
customType.setStoreTermVectors(true);
document.add(newField("tvtest", "", customType));
iw.addDocument(document);
iw.shutdown();
iw.close();
dir.close();
}
@ -710,7 +708,7 @@ public class TestIndexWriter extends LuceneTestCase {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
for(int i=0;i<4;i++)
iw.addDocument(document);
iw.shutdown();
iw.close();
dir.close();
} finally {
Thread.currentThread().setPriority(pri);
@ -751,14 +749,14 @@ public class TestIndexWriter extends LuceneTestCase {
for(int j=0;j<4;j++)
writer.addDocument(doc);
writer.shutdown();
writer.close();
if (0 == i % 4) {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
//LogMergePolicy lmp2 = (LogMergePolicy) writer.getConfig().getMergePolicy();
//lmp2.setNoCFSRatio(0.0);
writer.forceMerge(1);
writer.shutdown();
writer.close();
}
}
dir.close();
@ -777,7 +775,7 @@ public class TestIndexWriter extends LuceneTestCase {
b.append(" x");
doc.add(newTextField("field", b.toString(), Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
Term t = new Term("field", "x");
@ -795,7 +793,7 @@ public class TestIndexWriter extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("", "a b c", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
dir.close();
}
@ -805,7 +803,7 @@ public class TestIndexWriter extends LuceneTestCase {
Document doc = new Document();
doc.add(newTextField("", "a b c", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader subreader = getOnlySegmentReader(reader);
TermsEnum te = subreader.fields().terms("").iterator(null);
@ -826,7 +824,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(newStringField("", "b", Field.Store.NO));
doc.add(newStringField("", "c", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
AtomicReader subreader = getOnlySegmentReader(reader);
TermsEnum te = subreader.fields().terms("").iterator(null);
@ -879,7 +877,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.commit();
assertTrue(w.beforeWasCalled);
assertTrue(w.afterWasCalled);
w.shutdown();
w.close();
IndexReader ir = DirectoryReader.open(dir);
assertEquals(0, ir.numDocs());
@ -918,7 +916,7 @@ public class TestIndexWriter extends LuceneTestCase {
} catch (IllegalArgumentException iea) {
// expected
}
w.shutdown();
w.close();
dir.close();
}
@ -937,7 +935,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(f);
doc.add(f2);
w.addDocument(doc);
w.shutdown();
w.close();
IndexReader r = DirectoryReader.open(dir);
Terms tpv = r.getTermVectors(0).terms("field");
@ -982,11 +980,11 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir2 = newDirectory();
IndexWriter writer2 = new IndexWriter(dir2, newIndexWriterConfig(new MockAnalyzer(random())));
writer2.addDocument(doc);
writer2.shutdown();
writer2.close();
IndexReader r1 = DirectoryReader.open(dir2);
writer.addIndexes(r1, r1);
writer.shutdown();
writer.close();
IndexReader r3 = DirectoryReader.open(dir);
assertEquals(5, r3.numDocs());
@ -1013,8 +1011,7 @@ public class TestIndexWriter extends LuceneTestCase {
// make a little directory for addIndexes
// LUCENE-2239: won't work with NIOFS/MMAP
adder = new MockDirectoryWrapper(random, new RAMDirectory());
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random));
IndexWriterConfig conf = newIndexWriterConfig(random, new MockAnalyzer(random));
IndexWriter w = new IndexWriter(adder, conf);
Document doc = new Document();
doc.add(newStringField(random, "id", "500", Field.Store.NO));
@ -1047,7 +1044,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
w.addDocument(doc);
w.deleteDocuments(new Term("id", "500"));
w.shutdown();
w.close();
}
@Override
@ -1055,7 +1052,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-2239: won't work with NIOFS/MMAP
MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
// When interrupt arrives in w.shutdown(), this can
// When interrupt arrives in w.close(), this can
// lead to double-write of files:
dir.setPreventDoubleWrite(false);
@ -1072,14 +1069,14 @@ public class TestIndexWriter extends LuceneTestCase {
// until this one successfully closes:
// w.rollback();
try {
w.shutdown();
w.close();
} catch (AlreadyClosedException ace) {
// OK
}
w = null;
}
IndexWriterConfig conf = newIndexWriterConfig(random,
TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(2);
new MockAnalyzer(random)).setMaxBufferedDocs(2);
w = new IndexWriter(dir, conf);
Document doc = new Document();
@ -1131,7 +1128,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.forceMerge(1);
}
}
w.shutdown();
w.close();
w = null;
DirectoryReader.open(dir).close();
@ -1337,7 +1334,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
w.commit();
w.forceMerge(1); // force segment merge.
w.shutdown();
w.close();
IndexReader ir = DirectoryReader.open(dir);
StoredDocument doc2 = ir.document(0);
@ -1373,7 +1370,7 @@ public class TestIndexWriter extends LuceneTestCase {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.addDocument(new Document());
writer.shutdown();
writer.close();
dir.close();
}
@ -1463,7 +1460,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
//assertTrue(files.contains("_2.cfs"));
w.shutdown();
w.close();
r2.close();
dir.close();
@ -1506,7 +1503,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.deleteUnusedFiles();
assertEquals(1, DirectoryReader.listCommits(dir).size());
writer.shutdown();
writer.close();
dir.close();
}
@ -1515,7 +1512,7 @@ public class TestIndexWriter extends LuceneTestCase {
// then IndexWriter ctor succeeds. Previously (LUCENE-2386) it failed
// when listAll() was called in IndexFileDeleter.
Directory dir = newFSDirectory(createTempDir("emptyFSDirNoLock"), NoLockFactory.getNoLockFactory());
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).shutdown();
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).close();
dir.close();
}
@ -1599,7 +1596,7 @@ public class TestIndexWriter extends LuceneTestCase {
.setMaxBufferedDocs(2)
.setOpenMode(OpenMode.CREATE));
w2.shutdown();
w2.close();
// If we don't do that, the test fails on Windows
w.rollback();
@ -1637,7 +1634,7 @@ public class TestIndexWriter extends LuceneTestCase {
indexWriter.addDocument(doc);
}
indexWriter.shutdown();
indexWriter.close();
TestUtil.checkIndex(dir);
@ -1727,7 +1724,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.forceMerge(1);
IndexReader reader = w.getReader();
w.shutdown();
w.close();
// Make sure all terms < max size were indexed
assertEquals(1, reader.docFreq(new Term("content", "abc")));
@ -1765,7 +1762,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
reader = w.getReader();
w.shutdown();
w.close();
assertEquals(1, reader.docFreq(new Term("content", bigTerm)));
reader.close();
@ -1775,7 +1772,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testDeleteAllNRTLeftoverFiles() throws Exception {
Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
for(int i = 0; i < 20; i++) {
for(int j = 0; j < 100; ++j) {
@ -1791,13 +1788,13 @@ public class TestIndexWriter extends LuceneTestCase {
assertTrue(d.listAll().length <= 2);
}
w.shutdown();
w.close();
d.close();
}
public void testNRTReaderVersion() throws Exception {
Directory d = new MockDirectoryWrapper(random(), new RAMDirectory());
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random())));
Document doc = new Document();
doc.add(newStringField("id", "0", Field.Store.YES));
w.addDocument(doc);
@ -1813,7 +1810,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.deleteDocuments(new Term("id", "0"));
r = w.getReader();
w.shutdown();
w.close();
long version3 = r.getVersion();
r.close();
assert(version3 > version2);
@ -1835,14 +1832,14 @@ public class TestIndexWriter extends LuceneTestCase {
} catch (LockObtainFailedException lofe) {
// expected
}
w1.shutdown();
w1.close();
d.close();
}
public void testChangeIndexOptions() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
new IndexWriterConfig(new MockAnalyzer(random())));
FieldType docsAndFreqs = new FieldType(TextField.TYPE_NOT_STORED);
docsAndFreqs.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
@ -1858,20 +1855,20 @@ public class TestIndexWriter extends LuceneTestCase {
doc = new Document();
doc.add(new Field("field", "a b c", docsOnly));
w.addDocument(doc);
w.shutdown();
w.close();
dir.close();
}
public void testOnlyUpdateDocuments() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
new IndexWriterConfig(new MockAnalyzer(random())));
final List<Document> docs = new ArrayList<>();
docs.add(new Document());
w.updateDocuments(new Term("foo", "bar"),
docs);
w.shutdown();
w.close();
dir.close();
}
@ -1879,17 +1876,17 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenClose() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
new IndexWriterConfig(new MockAnalyzer(random())));
w.prepareCommit();
try {
w.shutdown();
w.close();
fail("should have hit exception");
} catch (IllegalStateException ise) {
// expected
}
w.commit();
w.shutdown();
w.close();
IndexReader r = DirectoryReader.open(dir);
assertEquals(0, r.maxDoc());
r.close();
@ -1900,7 +1897,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenRollback() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
new IndexWriterConfig(new MockAnalyzer(random())));
w.prepareCommit();
w.rollback();
@ -1912,7 +1909,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testPrepareCommitThenRollback2() throws Exception {
Directory dir = newDirectory();
IndexWriter w = new IndexWriter(dir,
new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
new IndexWriterConfig(new MockAnalyzer(random())));
w.commit();
w.addDocument(new Document());
@ -1956,7 +1953,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(f2);
doc.add(f);
w.addDocument(doc);
w.shutdown();
w.close();
dir.close();
}
@ -1968,14 +1965,14 @@ public class TestIndexWriter extends LuceneTestCase {
IndexWriter iw = new IndexWriter(dir,
newIndexWriterConfig(new MockAnalyzer(random())));
iw.addDocument(new Document());
iw.shutdown();
iw.close();
try {
// Create my own random file:
IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random()));
out.writeByte((byte) 42);
out.close();
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).shutdown();
new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))).close();
assertTrue(slowFileExists(dir, "myrandomfile"));
} finally {
@ -2000,7 +1997,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(new TextField("body", "test of gaps", Field.Store.NO));
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
PhraseQuery pq = new PhraseQuery();
pq.add(new Term("body", "just"), 0);
@ -2031,7 +2028,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(new TextField("body", "test of gaps", Field.Store.NO));
iw.addDocument(doc);
IndexReader ir = iw.getReader();
iw.shutdown();
iw.close();
IndexSearcher is = newSearcher(ir);
PhraseQuery pq = new PhraseQuery();
pq.add(new Term("body", "just"), 0);
@ -2045,7 +2042,7 @@ public class TestIndexWriter extends LuceneTestCase {
// LUCENE-4398
public void testRotatingFieldNames() throws Exception {
Directory dir = newFSDirectory(createTempDir("TestIndexWriter.testChangingFields"));
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setRAMBufferSizeMB(0.2);
iwc.setMaxBufferedDocs(-1);
IndexWriter w = new IndexWriter(dir, iwc);
@ -2084,7 +2081,7 @@ public class TestIndexWriter extends LuceneTestCase {
upto = 0;
}
}
w.shutdown();
w.close();
dir.close();
}
@ -2126,7 +2123,7 @@ public class TestIndexWriter extends LuceneTestCase {
"value2", r.getIndexCommit().getUserData().get("key"));
r.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -2138,13 +2135,13 @@ public class TestIndexWriter extends LuceneTestCase {
put("key", "value");
}});
assertEquals("value", writer.getCommitData().get("key"));
writer.shutdown();
writer.close();
// validate that it's also visible when opening a new IndexWriter
writer = new IndexWriter(dir, newIndexWriterConfig(null)
.setOpenMode(OpenMode.APPEND));
assertEquals("value", writer.getCommitData().get("key"));
writer.shutdown();
writer.close();
dir.close();
}
@ -2170,7 +2167,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader ir = iw.getReader();
assertEquals(3, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -2192,7 +2189,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader ir = iw.getReader();
assertEquals(3, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -2214,7 +2211,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader ir = iw.getReader();
assertEquals(3, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -2246,7 +2243,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader ir = iw.getReader();
assertEquals(3, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -2277,7 +2274,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader ir = iw.getReader();
assertEquals(3, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -2338,7 +2335,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
}
assertTrue(liveIds.isEmpty());
w.shutdown();
w.close();
IOUtils.close(reader, dir);
}
@ -2390,7 +2387,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
}
assertTrue(liveIds.isEmpty());
w.shutdown();
w.close();
IOUtils.close(reader, dir);
}
@ -2422,7 +2419,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertNotNull(e.getMessage());
assertEquals("boom", e.getMessage());
}
w.shutdown();
w.close();
IOUtils.close(dir);
}
@ -2482,7 +2479,7 @@ public class TestIndexWriter extends LuceneTestCase {
try {
if ((i & 1) == 0) {
new IndexWriter(dir, iwc).shutdown();
new IndexWriter(dir, iwc).close();
} else {
new IndexWriter(dir, iwc).rollback();
}
@ -2547,14 +2544,14 @@ public class TestIndexWriter extends LuceneTestCase {
writer.waitForMerges();
writer.commit();
assertFalse(writer.hasUncommittedChanges());
writer.shutdown();
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
assertFalse(writer.hasUncommittedChanges());
writer.addDocument(doc);
assertTrue(writer.hasUncommittedChanges());
writer.shutdown();
writer.close();
dir.close();
}
@ -2582,14 +2579,14 @@ public class TestIndexWriter extends LuceneTestCase {
}
evilWriter.deleteDocuments(new MatchAllDocsQuery());
evilWriter.forceMerge(1);
evilWriter.shutdown();
evilWriter.close();
dir.close();
}
// LUCENE-5239
public void testDeleteSameTermAcrossFields() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new TextField("a", "foo", Field.Store.NO));
@ -2601,7 +2598,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.deleteDocuments(new Term("a", "xxx"));
w.deleteDocuments(new Term("b", "foo"));
IndexReader r = w.getReader();
w.shutdown();
w.close();
// Make sure document was not (incorrectly) deleted:
assertEquals(1, r.numDocs());
@ -2628,7 +2625,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
iwriter.commit();
assertFalse(iwriter.hasUncommittedChanges());
iwriter.shutdown();
iwriter.close();
directory.close();
}
@ -2668,35 +2665,13 @@ public class TestIndexWriter extends LuceneTestCase {
dir.close();
}
public void testUncommittedChanges() throws IOException {
Directory dir = newDirectory();
// If version is < 50 IW.close should throw an exception
// on uncommitted changes:
IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new SortedDocValuesField("dv", new BytesRef("foo!")));
w.addDocument(doc);
try {
w.close();
fail("didn't hit exception");
} catch (RuntimeException re) {
// expected
assertTrue(re.getMessage().contains("this writer is closed, but some pending changes or running merges were discarded"));
}
w.rollback();
dir.close();
}
public void testCloseWhileMergeIsRunning() throws IOException {
Directory dir = newDirectory();
final CountDownLatch mergeStarted = new CountDownLatch(1);
final CountDownLatch closeStarted = new CountDownLatch(1);
// If version is < 50 IW.close should throw an exception
// on still-running merges:
IndexWriterConfig iwc = newIndexWriterConfig(random(), Version.LUCENE_4_8, new MockAnalyzer(random()));
IndexWriterConfig iwc = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCommitOnClose(false);
LogDocMergePolicy mp = new LogDocMergePolicy();
mp.setMergeFactor(2);
iwc.setMergePolicy(mp);
@ -2742,17 +2717,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.commit();
w.addDocument(doc);
w.commit();
try {
w.close();
fail("didn't hit exception");
} catch (RuntimeException re) {
// expected
if (VERBOSE) {
System.out.println("GOT: " + re.getMessage());
}
assertTrue(re.getMessage().contains("this writer is closed, but some pending changes or running merges were discarded"));
}
w.rollback();
w.close();
dir.close();
}
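
The two blocks deleted above are the behavioral core of this change: close() no longer throws "this writer is closed, but some pending changes or running merges were discarded" based on the configured Version. It now either commits pending changes (the default, matching 4.x) or, with setCommitOnClose(false), discards them with rollback semantics. A minimal sketch of both modes, assuming only the post-LUCENE-5871 API; the RAMDirectory, field name, and text are illustrative:

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CommitOnCloseDemo {
  public static void main(String[] args) throws IOException {
    Directory dir = new RAMDirectory();

    // Default (commitOnClose == true): close() commits pending changes first.
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    Document doc = new Document();
    doc.add(new TextField("body", "some text", Field.Store.NO));
    w.addDocument(doc);
    w.close(); // implicit commit: the document becomes visible

    // Opt out: close() behaves like rollback() and drops uncommitted changes.
    IndexWriter w2 = new IndexWriter(dir,
        new IndexWriterConfig(new StandardAnalyzer()).setCommitOnClose(false));
    w2.addDocument(doc);
    w2.close(); // no commit() was called, so this second document is discarded

    DirectoryReader r = DirectoryReader.open(dir);
    System.out.println(r.numDocs()); // prints 1, not 2
    r.close();
    dir.close();
  }
}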

View File

@ -46,7 +46,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
writer.shutdown();
writer.close();
Term searchTerm = new Term("content", "aaa");
DirectoryReader reader = DirectoryReader.open(dir);
@ -71,7 +71,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
// Now, close the writer:
writer.shutdown();
writer.close();
assertFalse("reader should not be current now", reader.isCurrent());
IndexReader r = DirectoryReader.open(dir);
@ -98,7 +98,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 14; i++) {
TestIndexWriter.addDoc(writer);
}
writer.shutdown();
writer.close();
Term searchTerm = new Term("content", "aaa");
IndexReader reader = DirectoryReader.open(dir);
@ -156,7 +156,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
r.close();
}
writer.shutdown();
writer.close();
IndexReader r = DirectoryReader.open(dir);
searcher = newSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
@ -215,7 +215,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for(int j=0;j<30;j++) {
TestIndexWriter.addDocWithIndex(writer, j);
}
writer.shutdown();
writer.close();
dir.resetMaxUsedSizeInBytes();
dir.setTrackDiskUsage(true);
@ -236,7 +236,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
long midDiskUsage = dir.getMaxUsedSizeInBytes();
dir.resetMaxUsedSizeInBytes();
writer.forceMerge(1);
writer.shutdown();
writer.close();
DirectoryReader.open(dir).close();
@ -278,7 +278,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for(int j=0;j<17;j++) {
TestIndexWriter.addDocWithIndex(writer, j);
}
writer.shutdown();
writer.close();
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
@ -309,7 +309,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setOpenMode(OpenMode.APPEND));
writer.forceMerge(1);
writer.shutdown();
writer.close();
if (VERBOSE) {
System.out.println("TEST: writer closed");
@ -378,7 +378,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
threads[i].join();
}
assertFalse(failed.get());
w.shutdown();
w.close();
dir.close();
}
@ -418,7 +418,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
assertEquals(40, reader.numDocs());
reader.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -440,7 +440,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
w.addDocument(doc);
commitData.put("tag", "second");
w.setCommitData(commitData);
w.shutdown();
w.close();
// open "first" with IndexWriter
IndexCommit commit = null;
@ -463,7 +463,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
w.addDocument(doc);
commitData.put("tag", "third");
w.setCommitData(commitData);
w.shutdown();
w.close();
// make sure "second" commit is still there
commit = null;
@ -492,7 +492,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// that's expected!
}
// Even with no changes, closing should still generate a commit, because it's a new index.
writer.shutdown();
writer.close();
assertEquals("expected 1 commits!", 1, DirectoryReader.listCommits(dir).size());
dir.close();
}
@ -549,7 +549,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
assertEquals(40, reader.numDocs());
reader.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -606,7 +606,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
assertEquals(17, reader.numDocs());
reader.close();
writer.shutdown();
writer.close();
dir.close();
}
@ -617,7 +617,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
writer.prepareCommit();
writer.commit();
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(0, reader.numDocs());
@ -632,7 +632,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
.setMaxBufferedDocs(2));
for(int j=0;j<17;j++)
TestIndexWriter.addDoc(w);
w.shutdown();
w.close();
DirectoryReader r = DirectoryReader.open(dir);
// commit(Map) never called for this index
@ -646,7 +646,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
Map<String,String> data = new HashMap<>();
data.put("label", "test1");
w.setCommitData(data);
w.shutdown();
w.close();
r = DirectoryReader.open(dir);
assertEquals("test1", r.getIndexCommit().getUserData().get("label"));
@ -654,7 +654,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
w = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
w.forceMerge(1);
w.shutdown();
w.close();
dir.close();
}
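
As a reference for the setCommitData/getUserData round trips exercised above, the pattern under the version-free config is roughly the following sketch; the "tag" key and its value are illustrative only:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class CommitDataDemo {
  public static void main(String[] args) throws IOException {
    Directory dir = new RAMDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    w.addDocument(new Document());

    Map<String,String> commitData = new HashMap<>();
    commitData.put("tag", "first");
    w.setCommitData(commitData); // recorded with the next commit
    w.close();                   // commits, since commitOnClose defaults to true

    DirectoryReader r = DirectoryReader.open(dir);
    System.out.println(r.getIndexCommit().getUserData().get("tag")); // "first"
    r.close();
    dir.close();
  }
}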

View File

@ -20,14 +20,11 @@ package org.apache.lucene.index;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FieldInfosFormat;
import org.apache.lucene.codecs.StoredFieldsFormat;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.DocumentsWriterPerThread.IndexingChain;
@ -58,7 +55,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testDefaults() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()));
assertEquals(MockAnalyzer.class, conf.getAnalyzer().getClass());
assertNull(conf.getIndexCommit());
assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
@ -144,7 +141,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
Directory dir = newDirectory();
// test that IWC cannot be reused across two IWs
IndexWriterConfig conf = newIndexWriterConfig(null);
new RandomIndexWriter(random(), dir, conf).shutdown();
new RandomIndexWriter(random(), dir, conf).close();
// this should fail
try {
@ -193,7 +190,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testToString() throws Exception {
String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).toString();
String str = new IndexWriterConfig(new MockAnalyzer(random())).toString();
for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
int modifiers = f.getModifiers();
if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
@ -213,7 +210,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
@Test
public void testInvalidValues() throws Exception {
IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random()));
// Test IndexDeletionPolicy
assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
@ -321,7 +318,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
public void testLiveChangeToCFS() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy(true));
// Start false:
iwc.setUseCompoundFile(false);
@ -353,7 +350,7 @@ public class TestIndexWriterConfig extends LuceneTestCase {
w.forceMerge(1);
w.commit();
assertTrue("Expected CFS after merge", w.newestSegment().info.getUseCompoundFile());
w.shutdown();
w.close();
dir.close();
}
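
The edits in this file show the migration pattern in its purest form: drop the leading Version argument and keep everything else. A sketch of version-free construction with a few chained setters; the open mode, buffer size, and commitOnClose values are shown only for illustration:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;

public class ConfigDemo {
  public static void main(String[] args) {
    Analyzer analyzer = new StandardAnalyzer();

    // 4.x style (removed by this commit):
    //   new IndexWriterConfig(Version.LUCENE_4_8, analyzer)

    // Version-free construction; behavioral switches are explicit setters now.
    IndexWriterConfig conf = new IndexWriterConfig(analyzer)
        .setOpenMode(OpenMode.CREATE_OR_APPEND) // the default open mode
        .setRAMBufferSizeMB(64.0)               // illustrative buffer size
        .setCommitOnClose(true);                // the default, matching 4.x close()
    System.out.println(conf); // toString() lists every live setting
  }
}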

View File

@ -89,7 +89,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
hitCount = getHitCount(dir, term);
assertEquals(0, hitCount);
modifier.shutdown();
modifier.close();
dir.close();
}
@ -124,7 +124,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
reader = DirectoryReader.open(dir);
assertEquals(0, reader.numDocs());
reader.close();
modifier.shutdown();
modifier.close();
dir.close();
}
@ -138,7 +138,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
assertEquals(3, writer.getFlushDeletesCount());
writer.shutdown();
writer.close();
dir.close();
}
@ -179,7 +179,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
int hitCount = getHitCount(dir, new Term("id", String.valueOf(id)));
assertEquals(1, hitCount);
reader.close();
modifier.shutdown();
modifier.close();
dir.close();
}
}
@ -213,7 +213,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(dir);
assertEquals(5, reader.numDocs());
modifier.shutdown();
modifier.close();
reader.close();
dir.close();
}
@ -257,7 +257,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
assertEquals(2, reader.numDocs());
reader.close();
modifier.shutdown();
modifier.close();
dir.close();
}
@ -303,7 +303,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
assertEquals(2, reader.numDocs());
reader.close();
modifier.shutdown();
modifier.close();
dir.close();
}
@ -360,7 +360,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
thread.join();
}
modifier.shutdown();
modifier.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(reader.maxDoc(), 0);
assertEquals(reader.numDocs(), 0);
@ -507,7 +507,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
d.add(new NumericDocValuesField("dv", i));
writer.addDocument(d);
}
writer.shutdown();
writer.close();
long diskUsage = startDir.sizeInBytes();
long diskFree = diskUsage + 10;
@ -593,7 +593,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
docId += 12;
}
modifier.shutdown();
modifier.close();
}
success = true;
if (0 == x) {
@ -858,7 +858,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// Make sure the delete was successfully flushed:
assertEquals(0, hitCount);
modifier.shutdown();
modifier.close();
dir.close();
}
@ -915,14 +915,14 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
}
modifier.shutdown();
modifier.close();
TestIndexWriter.assertNoUnreferencedFiles(dir, "docsWriter.abort() failed to delete unreferenced files");
dir.close();
}
public void testDeleteNullQuery() throws IOException {
Directory dir = newDirectory();
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
for (int i = 0; i < 5; i++) {
addDoc(modifier, i, 2*i);
@ -931,7 +931,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.deleteDocuments(new TermQuery(new Term("nada", "nada")));
modifier.commit();
assertEquals(5, modifier.numDocs());
modifier.shutdown();
modifier.close();
dir.close();
}
@ -963,7 +963,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
r.close();
}
w.shutdown();
w.close();
dir.close();
}
@ -1014,7 +1014,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
assertTrue("flush happened too quickly during " + (doIndexing ? "indexing" : "deleting") + " count=" + count, count > 2500);
}
w.shutdown();
w.close();
dir.close();
}
@ -1063,7 +1063,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
fail("delete's were not applied");
}
}
w.shutdown();
w.close();
dir.close();
}
@ -1106,7 +1106,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
fail("delete's were not applied at count=" + flushAtDelCount);
}
}
w.shutdown();
w.close();
dir.close();
}
@ -1158,14 +1158,14 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
closing.set(true);
assertTrue(sawAfterFlush.get());
w.shutdown();
w.close();
dir.close();
}
// LUCENE-4455
public void testDeletesCheckIndexOutput() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setMaxBufferedDocs(2);
IndexWriter w = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -1181,7 +1181,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
w.deleteDocuments(new Term("field", "0"));
w.commit();
assertEquals(1, w.getSegmentCount());
w.shutdown();
w.close();
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
CheckIndex checker = new CheckIndex(dir);
@ -1192,10 +1192,10 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// Segment should have deletions:
assertTrue(s.contains("has deletions"));
iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = new IndexWriterConfig(new MockAnalyzer(random()));
w = new IndexWriter(dir, iwc);
w.forceMerge(1);
w.shutdown();
w.close();
bos = new ByteArrayOutputStream(1024);
checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8), false);
@ -1210,22 +1210,22 @@ public class TestIndexWriterDelete extends LuceneTestCase {
Directory d = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter w = new IndexWriter(d, iwc);
Document doc = new Document();
w.addDocument(doc);
w.addDocument(doc);
w.addDocument(doc);
w.shutdown();
w.close();
iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
w = new IndexWriter(d, iwc);
IndexReader r = DirectoryReader.open(w, false);
assertTrue(w.tryDeleteDocument(r, 1));
assertTrue(w.tryDeleteDocument(r.leaves().get(0).reader(), 0));
r.close();
w.shutdown();
w.close();
r = DirectoryReader.open(d);
assertEquals(2, r.numDeletedDocs());
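
The last hunk's tryDeleteDocument calls depend on a near-real-time reader that pins the writer's current segments; a compressed sketch of that pattern, using the same null-analyzer shortcut as the test (acceptable here because no tokenized fields are added):

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class TryDeleteDemo {
  public static void main(String[] args) throws IOException {
    Directory d = new RAMDirectory();
    IndexWriter w = new IndexWriter(d, new IndexWriterConfig(null));
    w.addDocument(new Document());
    w.addDocument(new Document());

    // tryDeleteDocument succeeds only while the segment holding the docID
    // is still live in the writer; the NRT reader pins that state.
    DirectoryReader r = DirectoryReader.open(w, true);
    boolean deleted = w.tryDeleteDocument(r, 0);
    System.out.println("deleted: " + deleted);
    r.close();
    w.close(); // commits the buffered delete, since commitOnClose defaults to true
    d.close();
  }
}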

View File

@ -271,7 +271,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.commit();
try {
writer.shutdown();
writer.close();
} catch (Throwable t) {
System.out.println("exception during close:");
t.printStackTrace(System.out);
@ -318,7 +318,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.commit();
try {
writer.shutdown();
writer.close();
} catch (Throwable t) {
System.out.println("exception during close:");
t.printStackTrace(System.out);
@ -385,7 +385,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
} catch (RuntimeException re) {
// expected
}
w.shutdown();
w.close();
dir.close();
}
@ -418,7 +418,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
// expected
}
w.addDocument(doc);
w.shutdown();
w.close();
dir.close();
}
@ -459,7 +459,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
((ConcurrentMergeScheduler) w.getConfig().getMergeScheduler()).sync();
assertTrue(testPoint.failed);
w.shutdown();
w.close();
dir.close();
}
@ -515,7 +515,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
doc.add(newTextField("content", "aa bb cc dd", Field.Store.NO));
writer.addDocument(doc);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
final Term t = new Term("content", "aa");
assertEquals(3, reader.docFreq(t));
@ -600,7 +600,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
}
assertTrue(hitError);
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(198, reader.docFreq(new Term("content", "aa")));
reader.close();
@ -652,7 +652,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.addDocument(doc);
writer.addDocument(doc);
}
writer.shutdown();
writer.close();
if (VERBOSE) {
System.out.println("TEST: open reader");
@ -684,7 +684,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for(int j=0;j<17;j++)
writer.addDocument(doc);
writer.forceMerge(1);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
int expected = 19+(1-i)*2;
@ -767,7 +767,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for(int t=0;t<NUM_THREAD;t++)
threads[t].join();
writer.shutdown();
writer.close();
}
IndexReader reader = DirectoryReader.open(dir);
@ -796,7 +796,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
for(int j=0;j<17;j++)
writer.addDocument(doc);
writer.forceMerge(1);
writer.shutdown();
writer.close();
reader = DirectoryReader.open(dir);
expected += 17-NUM_THREAD*NUM_ITER;
@ -871,7 +871,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
assertTrue(failure.didFail);
failure.clearDoFail();
writer.shutdown();
writer.close();
IndexReader reader = DirectoryReader.open(dir);
assertEquals(23, reader.numDocs());
@ -945,7 +945,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
w.addDocument(doc);
dir.failOn(failure);
try {
w.shutdown();
w.close();
fail();
} catch (IOException ioe) {
fail("expected only RuntimeException");
@ -969,7 +969,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexWriter w = new IndexWriter(startDir, conf);
for(int i=0;i<27;i++)
addDoc(w);
w.shutdown();
w.close();
int iter = TEST_NIGHTLY ? 200 : 10;
for(int i=0;i<iter;i++) {
@ -989,7 +989,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
fail("forceMerge threw IOException without root cause");
}
dir.setRandomIOExceptionRate(0);
w.shutdown();
w.close();
dir.close();
}
startDir.close();
@ -1005,7 +1005,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
.setInfoStream(new InfoStream() {
@Override
public void message(String component, final String message) {
if (message.startsWith("now flush at shutdown") && thrown.compareAndSet(false, true)) {
if (message.startsWith("now flush at close") && thrown.compareAndSet(false, true)) {
throw new OutOfMemoryError("fake OOME at " + message);
}
}
@ -1020,13 +1020,13 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}));
try {
writer.shutdown();
writer.close();
fail("OutOfMemoryError expected");
}
catch (final OutOfMemoryError expected) {}
// throws IllegalStateEx w/o bug fix
writer.shutdown();
writer.close();
dir.close();
}
@ -1077,7 +1077,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// close
writer.shutdown();
writer.close();
long gen = SegmentInfos.getLastCommitGeneration(dir);
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
@ -1101,7 +1101,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
reader.close();
// should remove the corrupted segments_N
new IndexWriter(dir, newIndexWriterConfig(null)).shutdown();
new IndexWriter(dir, newIndexWriterConfig(null)).close();
dir.close();
}
@ -1122,7 +1122,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// close
writer.shutdown();
writer.close();
long gen = SegmentInfos.getLastCommitGeneration(dir);
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
@ -1178,7 +1178,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// close
writer.shutdown();
writer.close();
long gen = SegmentInfos.getLastCommitGeneration(dir);
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
@ -1226,7 +1226,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// close
writer.shutdown();
writer.close();
long gen = SegmentInfos.getLastCommitGeneration(dir);
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
@ -1269,7 +1269,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// close
writer.shutdown();
writer.close();
dir.close();
}
@ -1324,7 +1324,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
document = new Document();
document.add(new TextField("field", "a field", Field.Store.YES));
w.addDocument(document);
w.shutdown();
w.close();
IndexReader reader = DirectoryReader.open(dir);
assertTrue(reader.numDocs() > 0);
SegmentInfos sis = new SegmentInfos();
@ -1409,7 +1409,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
final IndexReader r = w.getReader();
w.shutdown();
w.close();
final IndexSearcher s = newSearcher(r);
PhraseQuery pq = new PhraseQuery();
@ -1490,7 +1490,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
final IndexReader r = w.getReader();
w.shutdown();
w.close();
final IndexSearcher s = newSearcher(r);
PhraseQuery pq = new PhraseQuery();
@ -1510,7 +1510,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredField() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
iw.addDocument(doc);
@ -1521,7 +1521,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1533,7 +1533,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredFieldReuse() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
Field theField = new StoredField("foo", "hello", StoredField.TYPE);
@ -1545,7 +1545,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1557,7 +1557,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredBytesField() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
iw.addDocument(doc);
@ -1570,7 +1570,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (NullPointerException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1582,7 +1582,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredBytesFieldReuse() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
Field theField = new StoredField("foo", new BytesRef("hello").bytes);
@ -1595,7 +1595,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (NullPointerException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1607,7 +1607,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredBytesRefField() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
iw.addDocument(doc);
@ -1620,7 +1620,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1632,7 +1632,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
public void testNullStoredBytesRefFieldReuse() throws Exception {
Directory dir = newDirectory();
Analyzer analyzer = new MockAnalyzer(random());
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
Field theField = new StoredField("foo", new BytesRef("hello"));
@ -1645,7 +1645,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
assertEquals(1, r.numDocs());
@ -1666,7 +1666,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
return -2;
}
};
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(analyzer));
// add good document
Document doc = new Document();
iw.addDocument(doc);
@ -1676,7 +1676,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
iw.addDocument(doc);
fail("didn't get expected exception");
} catch (IllegalArgumentException expected) {}
iw.shutdown();
iw.close();
// make sure we see our good doc
DirectoryReader r = DirectoryReader.open(dir);
@ -1709,7 +1709,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Directory d = new MockDirectoryWrapper(random(), uoe);
IndexWriter iw = new IndexWriter(d, newIndexWriterConfig(null));
iw.addDocument(new Document());
iw.shutdown();
iw.close();
uoe.doFail = true;
try {
new IndexWriter(d, newIndexWriterConfig(null));
@ -1740,7 +1740,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
} catch (IllegalArgumentException expected) {
// expected exception
}
iw.shutdown();
iw.close();
dir.close();
}
@ -1759,13 +1759,13 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Field field = new TextField("foo", overflowingTokenStream);
doc.add(field);
iw.addDocument(doc);
iw.shutdown();
iw.close();
dir.close();
}
public void testBoostOmitNorms() throws Exception {
Directory dir = newDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergePolicy(newLogMergePolicy());
IndexWriter iw = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -1824,7 +1824,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
assertEquals(1, ir.numDocs());
assertEquals("sometext", ir.document(0).get("field1"));
ir.close();
iw.shutdown();
iw.close();
dir.close();
}
@ -1857,7 +1857,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
dir.failOn(failure);
// Create an index with one document
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
IndexWriter iw = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new StringField("foo", "bar", Field.Store.NO));
@ -1866,12 +1866,12 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
DirectoryReader ir = DirectoryReader.open(dir);
assertEquals(1, ir.numDocs());
ir.close();
iw.shutdown();
iw.close();
// Open and close the index a few times
for (int i = 0; i < 10; i++) {
failure.setDoFail();
iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
iwc = new IndexWriterConfig(new MockAnalyzer(random()));
try {
iw = new IndexWriter(dir, iwc);
} catch (CorruptIndexException ex) {
@ -1881,7 +1881,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
continue;
}
failure.clearDoFail();
iw.shutdown();
iw.close();
ir = DirectoryReader.open(dir);
assertEquals("lost document after iteration: " + i, 1, ir.numDocs());
ir.close();
@ -2047,7 +2047,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
doClose = true;
w.commit();
w.shutdown();
w.close();
w = null;
}
@ -2058,7 +2058,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (ioe instanceof FakeIOException || (ioe.getCause() != null && ioe.getCause() instanceof FakeIOException)) {
// expected
if (VERBOSE) {
System.out.println("TEST: w.shutdown() hit expected IOE");
System.out.println("TEST: w.close() hit expected IOE");
}
} else {
throw ioe;
@ -2073,7 +2073,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
System.out.println(" now 2nd close writer");
}
try {
w.shutdown();
w.close();
} catch (AlreadyClosedException ace) {
// OK
}
@ -2120,7 +2120,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: close writer");
}
w.shutdown();
w.close();
w = null;
}
@ -2128,7 +2128,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
if (w != null) {
w.shutdown();
w.close();
}
// Final verify:
@ -2163,7 +2163,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
};
Directory dir = newMockDirectory(); // we want to ensure we don't leak any locks or file handles
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig iwc = new IndexWriterConfig(null);
iwc.setInfoStream(evilInfoStream);
IndexWriter iw = new IndexWriter(dir, iwc);
Document doc = new Document();
@ -2232,7 +2232,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
});
IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
IndexWriterConfig iwc = new IndexWriterConfig(null);
IndexWriter iw = new IndexWriter(dir, iwc);
Document doc = new Document();
for (int i = 0; i < 10; i++) {

View File

@ -200,7 +200,7 @@ public class TestIndexWriterExceptions2 extends LuceneTestCase {
}
try {
iw.shutdown();
iw.close();
} catch (Exception e) {
if (e.getMessage() != null && e.getMessage().startsWith("Fake IOException")) {
exceptionStream.println("\nTEST: got expected fake exc:" + e.getMessage());

View File

@ -46,7 +46,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
.setMergePolicy(ldmp));
for(int j=0;j<numDocs;j++)
writer.addDocument(doc);
writer.shutdown();
writer.close();
SegmentInfos sis = new SegmentInfos();
sis.read(dir);
@ -57,7 +57,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random()))
.setMergePolicy(ldmp));
writer.forceMerge(3);
writer.shutdown();
writer.close();
sis = new SegmentInfos();
sis.read(dir);
@ -110,7 +110,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
else
assertEquals("seg: " + segCount, 7, optSegCount);
}
writer.shutdown();
writer.close();
dir.close();
}
@ -136,7 +136,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
// we see the doc stores get merged
writer.commit();
TestIndexWriter.addDocWithIndex(writer, 500);
writer.shutdown();
writer.close();
if (VERBOSE) {
System.out.println("TEST: start disk usage");
@ -157,7 +157,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
.setOpenMode(OpenMode.APPEND)
.setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.shutdown();
writer.close();
long maxDiskUsage = dir.getMaxUsedSizeInBytes();
assertTrue("forceMerge used too much temporary space: starting usage was " + startDiskUsage + " bytes; max temp usage was " + maxDiskUsage + " but should have been " + (4*startDiskUsage) + " (= 4X starting usage)",
maxDiskUsage <= 4*startDiskUsage);
@ -166,7 +166,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
// Test calling forceMerge(1, false) whereby forceMerge is kicked
// off but we don't wait for it to finish (but
// writer.shutdown()) does wait
// writer.close() does wait)
public void testBackgroundForceMerge() throws IOException {
Directory dir = newDirectory();
@ -185,7 +185,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer.forceMerge(1, false);
if (0 == pass) {
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertEquals(1, reader.leaves().size());
reader.close();
@ -194,7 +194,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
// NOT included in the merging
writer.addDocument(doc);
writer.addDocument(doc);
writer.shutdown();
writer.close();
DirectoryReader reader = DirectoryReader.open(dir);
assertTrue(reader.leaves().size() > 1);
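
The comment above testBackgroundForceMerge states the contract this test relies on: forceMerge(1, false) only kicks the merge off, while close(), through its implicit commit, waits for it to finish. A sketch mirroring the first pass of that test; the document and commit counts are arbitrary:

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class BackgroundForceMergeDemo {
  public static void main(String[] args) throws IOException {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
    for (int i = 0; i < 100; i++) {
      writer.addDocument(new Document());
      if (i % 10 == 0) {
        writer.commit(); // force multiple segments
      }
    }

    writer.forceMerge(1, false); // kick off the merge, do not wait here
    writer.close();              // close() waits for the running merge, then commits

    DirectoryReader reader = DirectoryReader.open(dir);
    System.out.println(reader.leaves().size()); // 1 segment after the merge
    reader.close();
    dir.close();
  }
}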

View File

@ -38,10 +38,10 @@ public class TestIndexWriterLockRelease extends LuceneTestCase {
public void testIndexWriterLockRelease() throws IOException {
Directory dir = newFSDirectory(createTempDir("testLockRelease"));
try {
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException | NoSuchFileException e) {
try {
new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException | NoSuchFileException e1) {
}
} finally {

Some files were not shown because too many files have changed in this diff.