LUCENE-3606: Clean up IR.open(...) methods to no longer accept readOnly and IndexDeletionPolicy: Core, Contrib, Solr (Modules need fixing, benchmark was broken before, too)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3606@1210305 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Uwe Schindler 2011-12-05 01:41:38 +00:00
parent e7876964b1
commit 559205883f
78 changed files with 292 additions and 383 deletions

View File

@ -58,7 +58,7 @@ public class FieldTermStack {
// writer.addDocument( doc );
// writer.close();
// IndexReader reader = IndexReader.open( dir, true );
// IndexReader reader = IndexReader.open(dir1);
// new FieldTermStack( reader, 0, "f", fieldQuery );
// reader.close();
//}

View File

@ -68,7 +68,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -116,7 +116,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -191,7 +191,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -237,7 +237,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -281,7 +281,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);

View File

@ -1677,7 +1677,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
* writer = new IndexWriter(ramDir,bigramAnalyzer , true); Document d = new
* Document(); Field f = new Field(FIELD_NAME, "java abc def", true, true,
* true); d.add(f); writer.addDocument(d); writer.close(); IndexReader reader =
* IndexReader.open(ramDir, true);
* IndexReader.open(ramDir);
*
* IndexSearcher searcher=new IndexSearcher(reader); query =
* QueryParser.parse("abc", FIELD_NAME, bigramAnalyzer);
@ -1763,7 +1763,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
writer.addDocument(doc, analyzer);
writer.forceMerge(1);
writer.close();
reader = IndexReader.open(ramDir, true);
reader = IndexReader.open(ramDir);
numHighlights = 0;
}

View File

@ -113,7 +113,7 @@ public class TokenSourcesTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -162,7 +162,7 @@ public class TokenSourcesTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -210,7 +210,7 @@ public class TokenSourcesTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);
@ -259,7 +259,7 @@ public class TokenSourcesTest extends LuceneTestCase {
} finally {
indexWriter.close();
}
final IndexReader indexReader = IndexReader.open(directory, true);
final IndexReader indexReader = IndexReader.open(directory);
try {
assertEquals(1, indexReader.numDocs());
final IndexSearcher indexSearcher = newSearcher(indexReader);

View File

@ -364,7 +364,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
writer.addDocument( doc );
writer.close();
if (reader != null) reader.close();
reader = IndexReader.open( dir, true );
reader = IndexReader.open(dir);
}
// make 1 doc with multi valued & not analyzed field
@ -383,7 +383,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
writer.addDocument( doc );
writer.close();
if (reader != null) reader.close();
reader = IndexReader.open( dir, true );
reader = IndexReader.open(dir);
}
protected void makeIndexShortMV() throws Exception {

View File

@ -147,7 +147,7 @@ public class SimpleFragmentsBuilderTest extends AbstractTestCase {
writer.addDocument( doc );
writer.close();
if (reader != null) reader.close();
reader = IndexReader.open( dir, true );
reader = IndexReader.open(dir);
}
public void test1StrMV() throws Exception {

View File

@ -143,7 +143,7 @@ public class MultiPassIndexSplitter {
System.err.println("Invalid input index - skipping: " + file);
continue;
}
indexes.add(IndexReader.open(dir, true));
indexes.add(IndexReader.open(dir));
}
}
if (outDir == null) {

View File

@ -78,7 +78,7 @@ public class HighFreqTerms {
}
}
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
TermStats[] terms = getHighFreqTerms(reader, numTerms, field);
if (!IncludeTermFreqs) {
//default HighFreqTerms behavior

View File

@ -69,7 +69,7 @@ public class TestIndexSplitter extends LuceneTestCase {
String splitSegName = is.infos.info(1).name;
is.split(destDir, new String[] {splitSegName});
Directory fsDirDest = newFSDirectory(destDir);
IndexReader r = IndexReader.open(fsDirDest, true);
IndexReader r = IndexReader.open(fsDirDest);
assertEquals(50, r.maxDoc());
r.close();
fsDirDest.close();
@ -81,14 +81,14 @@ public class TestIndexSplitter extends LuceneTestCase {
IndexSplitter.main(new String[] {dir.getAbsolutePath(), destDir2.getAbsolutePath(), splitSegName});
assertEquals(4, destDir2.listFiles().length);
Directory fsDirDest2 = newFSDirectory(destDir2);
r = IndexReader.open(fsDirDest2, true);
r = IndexReader.open(fsDirDest2);
assertEquals(50, r.maxDoc());
r.close();
fsDirDest2.close();
// now remove the copied segment from src
IndexSplitter.main(new String[] {dir.getAbsolutePath(), "-d", splitSegName});
r = IndexReader.open(fsDir, true);
r = IndexReader.open(fsDir);
assertEquals(2, r.getSequentialSubReaders().length);
r.close();
fsDir.close();

View File

@ -123,7 +123,7 @@ public class TestAppendingCodec extends LuceneTestCase {
writer.addDocument(doc);
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, null, true, 1);
IndexReader reader = IndexReader.open(dir, 1);
assertEquals(2, reader.numDocs());
Document doc2 = reader.document(0);
assertEquals(text, doc2.get("f"));

View File

@ -43,7 +43,7 @@ public class TestHighFreqTerms extends LuceneTestCase {
TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false))
.setMaxBufferedDocs(2));
indexDocs(writer);
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
_TestUtil.checkIndex(dir);
}

View File

@ -302,21 +302,27 @@ public abstract class IndexReader implements Cloneable,Closeable {
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final Directory directory) throws CorruptIndexException, IOException {
return open(directory, null, null, true, DEFAULT_TERMS_INDEX_DIVISOR);
return open(directory, null, DEFAULT_TERMS_INDEX_DIVISOR);
}
/** Returns an IndexReader reading the index in the given
* Directory. You should pass readOnly=true, since it
* gives much better concurrent performance, unless you
* intend to do write operations (delete documents or
* change norms) with the reader.
/** Returns an IndexReader reading the index in the given
* Directory, with readOnly=true.
* @param directory the index directory
* @param readOnly true if no changes (deletions, norms) will be made with this IndexReader
* @param termInfosIndexDivisor Subsamples which indexed
* terms are loaded into RAM. This has the same effect as {@link
* IndexWriterConfig#setTermIndexInterval} except that setting
* must be done at indexing time while this setting can be
* set per reader. When set to N, then one in every
* N*termIndexInterval terms in the index is loaded into
* memory. By setting this to a value > 1 you can reduce
* memory usage, at the expense of higher latency when
* loading a TermInfo. The default value is 1. Set this
* to -1 to skip loading the terms index entirely.
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final Directory directory, boolean readOnly) throws CorruptIndexException, IOException {
return open(directory, null, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
public static IndexReader open(final Directory directory, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
return open(directory, null, termInfosIndexDivisor);
}
/**
@ -352,111 +358,12 @@ public abstract class IndexReader implements Cloneable,Closeable {
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final IndexCommit commit, boolean readOnly) throws CorruptIndexException, IOException {
return open(commit.getDirectory(), null, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
public static IndexReader open(final IndexCommit commit) throws CorruptIndexException, IOException {
return open(commit.getDirectory(), commit, DEFAULT_TERMS_INDEX_DIVISOR);
}
/** Expert: returns an IndexReader reading the index in
* the given Directory, with a custom {@link
* IndexDeletionPolicy}. You should pass readOnly=true,
* since it gives much better concurrent performance,
* unless you intend to do write operations (delete
* documents or change norms) with the reader.
* @param directory the index directory
* @param deletionPolicy a custom deletion policy (only used
* if you use this reader to perform deletes or to set
* norms); see {@link IndexWriter} for details.
* @param readOnly true if no changes (deletions, norms) will be made with this IndexReader
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final Directory directory, IndexDeletionPolicy deletionPolicy, boolean readOnly) throws CorruptIndexException, IOException {
return open(directory, deletionPolicy, null, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
}
/** Expert: returns an IndexReader reading the index in
* the given Directory, with a custom {@link
* IndexDeletionPolicy}. You should pass readOnly=true,
* since it gives much better concurrent performance,
* unless you intend to do write operations (delete
* documents or change norms) with the reader.
* @param directory the index directory
* @param deletionPolicy a custom deletion policy (only used
* if you use this reader to perform deletes or to set
* norms); see {@link IndexWriter} for details.
* @param readOnly true if no changes (deletions, norms) will be made with this IndexReader
* @param termInfosIndexDivisor Subsamples which indexed
* terms are loaded into RAM. This has the same effect as {@link
* IndexWriterConfig#setTermIndexInterval} except that setting
* must be done at indexing time while this setting can be
* set per reader. When set to N, then one in every
* N*termIndexInterval terms in the index is loaded into
* memory. By setting this to a value > 1 you can reduce
* memory usage, at the expense of higher latency when
* loading a TermInfo. The default value is 1. Set this
* to -1 to skip loading the terms index entirely.
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final Directory directory, IndexDeletionPolicy deletionPolicy, boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
return open(directory, deletionPolicy, null, readOnly, termInfosIndexDivisor);
}
/** Expert: returns an IndexReader reading the index in
* the given Directory, using a specific commit and with
* a custom {@link IndexDeletionPolicy}. You should pass
* readOnly=true, since it gives much better concurrent
* performance, unless you intend to do write operations
* (delete documents or change norms) with the reader.
* @param commit the specific {@link IndexCommit} to open;
* see {@link IndexReader#listCommits} to list all commits
* in a directory
* @param deletionPolicy a custom deletion policy (only used
* if you use this reader to perform deletes or to set
* norms); see {@link IndexWriter} for details.
* @param readOnly true if no changes (deletions, norms) will be made with this IndexReader
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final IndexCommit commit, IndexDeletionPolicy deletionPolicy, boolean readOnly) throws CorruptIndexException, IOException {
return open(commit.getDirectory(), deletionPolicy, commit, readOnly, DEFAULT_TERMS_INDEX_DIVISOR);
}
/** Expert: returns an IndexReader reading the index in
* the given Directory, using a specific commit and with
* a custom {@link IndexDeletionPolicy}. You should pass
* readOnly=true, since it gives much better concurrent
* performance, unless you intend to do write operations
* (delete documents or change norms) with the reader.
* @param commit the specific {@link IndexCommit} to open;
* see {@link IndexReader#listCommits} to list all commits
* in a directory
* @param deletionPolicy a custom deletion policy (only used
* if you use this reader to perform deletes or to set
* norms); see {@link IndexWriter} for details.
* @param readOnly true if no changes (deletions, norms) will be made with this IndexReader
* @param termInfosIndexDivisor Subsamples which indexed
* terms are loaded into RAM. This has the same effect as {@link
* IndexWriterConfig#setTermIndexInterval} except that setting
* must be done at indexing time while this setting can be
* set per reader. When set to N, then one in every
* N*termIndexInterval terms in the index is loaded into
* memory. By setting this to a value > 1 you can reduce
* memory usage, at the expense of higher latency when
* loading a TermInfo. The default value is 1. Set this
* to -1 to skip loading the terms index entirely. This is only useful in
* advanced situations when you will only .next() through all terms;
* attempts to seek will hit an exception.
*
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
public static IndexReader open(final IndexCommit commit, IndexDeletionPolicy deletionPolicy, boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
return open(commit.getDirectory(), deletionPolicy, commit, readOnly, termInfosIndexDivisor);
}
private static IndexReader open(final Directory directory, final IndexDeletionPolicy deletionPolicy, final IndexCommit commit, final boolean readOnly, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
// nocommit: deletionPolicy is ignored -> remove it, same for readonly
private static IndexReader open(final Directory directory, final IndexCommit commit, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
return DirectoryReader.open(directory, commit, termInfosIndexDivisor);
}

View File

@ -62,7 +62,7 @@ public class PersistentSnapshotDeletionPolicy extends SnapshotDeletionPolicy {
* keeps a lock on the snapshots directory).
*/
public static Map<String, String> readSnapshotsInfo(Directory dir) throws IOException {
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
Map<String, String> snapshots = new HashMap<String, String>();
try {
int numDocs = r.numDocs();

View File

@ -128,7 +128,7 @@ public final class SearcherManager {
ExecutorService es) throws IOException {
this.es = es;
this.warmer = warmer;
currentSearcher = new IndexSearcher(IndexReader.open(dir, true), es);
currentSearcher = new IndexSearcher(IndexReader.open(dir), es);
}
/**

View File

@ -147,7 +147,7 @@ public abstract class CollationTestBase extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(farsiIndex, true);
IndexReader reader = IndexReader.open(farsiIndex);
IndexSearcher search = newSearcher(reader);
// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi

View File

@ -385,7 +385,7 @@ public class RandomIndexWriter implements Closeable {
w.commit();
switchDoDocValues();
if (r.nextBoolean()) {
return IndexReader.open(w.getDirectory(), new KeepOnlyLastCommitDeletionPolicy(), r.nextBoolean(), _TestUtil.nextInt(r, 1, 10));
return IndexReader.open(w.getDirectory(), _TestUtil.nextInt(r, 1, 10));
} else {
return w.getReader(applyDeletions);
}

View File

@ -200,7 +200,7 @@ public class QueryUtils {
Assert.assertEquals("writer has non-deleted docs",
0, w.numDocs());
w.close();
IndexReader r = IndexReader.open(d, true);
IndexReader r = IndexReader.open(d);
Assert.assertEquals("reader has wrong number of deleted docs",
numDeletedDocs, r.numDeletedDocs());
return r;

View File

@ -424,7 +424,7 @@ public class TestAddIndexes extends LuceneTestCase {
setUpDirs(dir, aux);
IndexReader reader = IndexReader.open(aux, false);
IndexReader reader = IndexReader.open(aux);
for (int i = 0; i < 20; i++) {
reader.deleteDocument(i);
}
@ -469,14 +469,14 @@ public class TestAddIndexes extends LuceneTestCase {
assertEquals(3, writer.getSegmentCount());
writer.close();
IndexReader reader = IndexReader.open(aux, false);
IndexReader reader = IndexReader.open(aux);
for (int i = 0; i < 27; i++) {
reader.deleteDocument(i);
}
assertEquals(3, reader.numDocs());
reader.close();
reader = IndexReader.open(aux2, false);
reader = IndexReader.open(aux2);
for (int i = 0; i < 8; i++) {
reader.deleteDocument(i);
}
@ -525,7 +525,7 @@ public class TestAddIndexes extends LuceneTestCase {
}
private void verifyNumDocs(Directory dir, int numDocs) throws IOException {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(numDocs, reader.maxDoc());
assertEquals(numDocs, reader.numDocs());
reader.close();
@ -533,7 +533,7 @@ public class TestAddIndexes extends LuceneTestCase {
private void verifyTermDocs(Directory dir, Term term, int numDocs)
throws IOException {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
DocsEnum docsEnum = _TestUtil.docs(random, reader, term.field, term.bytes, null, null, false);
int count = 0;
while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS)
@ -659,7 +659,7 @@ public class TestAddIndexes extends LuceneTestCase {
readers = new IndexReader[NUM_COPY];
for(int i=0;i<NUM_COPY;i++)
readers[i] = IndexReader.open(dir, true);
readers[i] = IndexReader.open(dir);
}
void launchThreads(final int numIter) {
@ -785,7 +785,7 @@ public class TestAddIndexes extends LuceneTestCase {
assertTrue("found unexpected failures: " + c.failures, c.failures.isEmpty());
IndexReader reader = IndexReader.open(c.dir2, true);
IndexReader reader = IndexReader.open(c.dir2);
assertEquals(expectedNumDocs, reader.numDocs());
reader.close();

View File

@ -112,7 +112,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
@Override
public void doWork() throws Throwable {
IndexReader r = IndexReader.open(directory, true);
IndexReader r = IndexReader.open(directory);
assertEquals(100, r.numDocs());
r.close();
}
@ -144,7 +144,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
}
writer.commit();
IndexReader r = IndexReader.open(directory, true);
IndexReader r = IndexReader.open(directory);
assertEquals(100, r.numDocs());
r.close();

View File

@ -113,7 +113,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
}
writer.close();
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
assertEquals(200+extraCount, reader.numDocs());
reader.close();
directory.close();
@ -158,7 +158,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
}
writer.close();
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
// Verify that we did not lose any deletes...
assertEquals(450, reader.numDocs());
reader.close();
@ -230,7 +230,7 @@ public class TestConcurrentMergeScheduler extends LuceneTestCase {
writer.close(false);
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
assertEquals((1+iter)*182, reader.numDocs());
reader.close();

View File

@ -68,7 +68,7 @@ public class TestCrash extends LuceneTestCase {
IndexWriter writer = initIndex(random, true);
MockDirectoryWrapper dir = (MockDirectoryWrapper) writer.getDirectory();
crash(writer);
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertTrue(reader.numDocs() < 157);
reader.close();
dir.close();
@ -85,7 +85,7 @@ public class TestCrash extends LuceneTestCase {
writer = initIndex(random, dir, false);
writer.close();
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertTrue(reader.numDocs() < 314);
reader.close();
dir.close();
@ -108,7 +108,7 @@ public class TestCrash extends LuceneTestCase {
dir.fileLength(l[i]) + " bytes");
*/
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertTrue(reader.numDocs() >= 157);
reader.close();
dir.close();
@ -129,7 +129,7 @@ public class TestCrash extends LuceneTestCase {
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertEquals(157, reader.numDocs());
reader.close();
dir.close();
@ -150,7 +150,7 @@ public class TestCrash extends LuceneTestCase {
for(int i=0;i<l.length;i++)
System.out.println("file " + i + " = " + l[i] + " " + dir.fileLength(l[i]) + " bytes");
*/
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertEquals(157, reader.numDocs());
reader.close();
dir.close();

View File

@ -73,7 +73,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
}
public void onCommit(List<? extends IndexCommit> commits) throws IOException {
IndexCommit lastCommit = commits.get(commits.size()-1);
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
assertEquals("lastCommit.segmentCount()=" + lastCommit.getSegmentCount() + " vs IndexReader.segmentCount=" + r.getSequentialSubReaders().length, r.getSequentialSubReaders().length, lastCommit.getSegmentCount());
r.close();
verifyCommitOrder(commits);
@ -259,7 +259,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
while(gen > 0) {
try {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
reader.close();
fileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
"",
@ -351,7 +351,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Make sure we can open a reader on each commit:
for (final IndexCommit commit : commits) {
IndexReader r = IndexReader.open(commit, null, false);
IndexReader r = IndexReader.open(commit);
r.close();
}
@ -360,7 +360,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
dir.deleteFile(IndexFileNames.SEGMENTS_GEN);
long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
while(gen > 0) {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
reader.close();
dir.deleteFile(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", gen));
gen--;
@ -435,7 +435,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Should undo our rollback:
writer.rollback();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
// Still merged, still 11 docs
assertEquals(1, r.getSequentialSubReaders().length);
assertEquals(11, r.numDocs());
@ -450,7 +450,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Now 8 because we made another commit
assertEquals(7, IndexReader.listCommits(dir).size());
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
// Not fully merged because we rolled it back, and now only
// 10 docs
assertTrue(r.getSequentialSubReaders().length > 1);
@ -462,7 +462,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
assertEquals(1, r.getSequentialSubReaders().length);
assertEquals(10, r.numDocs());
r.close();
@ -474,7 +474,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Reader still sees fully merged index, because writer
// opened on the prior commit has not yet committed:
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
assertEquals(1, r.getSequentialSubReaders().length);
assertEquals(10, r.numDocs());
r.close();
@ -482,7 +482,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer.close();
// Now reader sees not-fully-merged index:
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
assertTrue(r.getSequentialSubReaders().length > 1);
assertEquals(10, r.numDocs());
r.close();
@ -535,7 +535,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Simplistic check: just verify the index is in fact
// readable:
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
reader.close();
dir.close();
@ -583,7 +583,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
long gen = SegmentInfos.getCurrentSegmentGeneration(dir);
for(int i=0;i<N+1;i++) {
try {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
reader.close();
if (i == N) {
fail("should have failed on commits prior to last " + N);
@ -668,7 +668,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
assertEquals(3*(N+1), policy.numOnInit);
assertEquals(3*(N+1)+1, policy.numOnCommit);
IndexReader rwReader = IndexReader.open(dir, false);
IndexReader rwReader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(rwReader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
assertEquals(0, hits.length);
@ -685,7 +685,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
for(int i=0;i<N+1;i++) {
try {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
// Work backwards in commits on what the expected
// count should be.

View File

@ -64,7 +64,7 @@ public class TestDirectoryReader extends LuceneTestCase {
protected IndexReader openReader() throws IOException {
IndexReader reader;
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
assertTrue(reader instanceof DirectoryReader);
assertTrue(dir != null);
@ -95,7 +95,7 @@ public class TestDirectoryReader extends LuceneTestCase {
addDoc(random, ramDir1, "test foo", true);
Directory ramDir2=newDirectory();
addDoc(random, ramDir2, "test blah", true);
IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false)};
IndexReader[] readers = new IndexReader[]{IndexReader.open(ramDir1), IndexReader.open(ramDir2)};
MultiReader mr = new MultiReader(readers);
assertTrue(mr.isCurrent()); // just opened, must be current
addDoc(random, ramDir1, "more text", false);
@ -121,8 +121,8 @@ public class TestDirectoryReader extends LuceneTestCase {
Directory ramDir3=newDirectory();
addDoc(random, ramDir3, "test wow", true);
IndexReader[] readers1 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir3, false)};
IndexReader[] readers2 = new IndexReader[]{IndexReader.open(ramDir1, false), IndexReader.open(ramDir2, false), IndexReader.open(ramDir3, false)};
IndexReader[] readers1 = new IndexReader[]{IndexReader.open(ramDir1), IndexReader.open(ramDir3)};
IndexReader[] readers2 = new IndexReader[]{IndexReader.open(ramDir1), IndexReader.open(ramDir2), IndexReader.open(ramDir3)};
MultiReader mr2 = new MultiReader(readers1);
MultiReader mr3 = new MultiReader(readers2);

View File

@ -285,7 +285,7 @@ public class TestDocumentWriter extends LuceneTestCase {
_TestUtil.checkIndex(dir);
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
// f1
Terms tfv1 = reader.getTermVectors(0).terms("f1");
assertNotNull(tfv1);
@ -326,7 +326,7 @@ public class TestDocumentWriter extends LuceneTestCase {
_TestUtil.checkIndex(dir);
SegmentReader reader = getOnlySegmentReader(IndexReader.open(dir, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(dir));
FieldInfos fi = reader.fieldInfos();
// f1
assertFalse("f1 should have no norms", reader.hasNorms("f1"));

View File

@ -206,7 +206,7 @@ public class TestFieldsReader extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
FaultyIndexInput.doFail = true;

View File

@ -144,11 +144,11 @@ public class TestFilterIndexReader extends LuceneTestCase {
Directory target = newDirectory();
writer = new IndexWriter(target, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
IndexReader reader = new TestReader(IndexReader.open(directory, true));
IndexReader reader = new TestReader(IndexReader.open(directory));
writer.addIndexes(reader);
writer.close();
reader.close();
reader = IndexReader.open(target, true);
reader = IndexReader.open(target);
TermsEnum terms = MultiFields.getTerms(reader, "default").iterator(null);
while (terms.next() != null) {

View File

@ -61,7 +61,7 @@ public class TestIndexReader extends LuceneTestCase {
addDocumentWithFields(writer);
writer.close();
// set up reader:
IndexReader reader = IndexReader.open(d, false);
IndexReader reader = IndexReader.open(d);
assertTrue(reader.isCurrent());
// modify index by adding another document:
writer = new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT,
@ -104,7 +104,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.close();
// set up reader
IndexReader reader = IndexReader.open(d, false);
IndexReader reader = IndexReader.open(d);
Collection<String> fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
assertTrue(fieldNames.contains("keyword"));
assertTrue(fieldNames.contains("text"));
@ -164,7 +164,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.close();
// verify fields again
reader = IndexReader.open(d, false);
reader = IndexReader.open(d);
fieldNames = reader.getFieldNames(IndexReader.FieldOption.ALL);
assertEquals(13, fieldNames.size()); // the following fields
assertTrue(fieldNames.contains("keyword"));
@ -299,7 +299,7 @@ public class TestIndexReader extends LuceneTestCase {
doc.add(new TextField("junk", "junk text"));
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
Document doc2 = reader.document(reader.maxDoc() - 1);
IndexableField[] fields = doc2.getFields("bin1");
assertNotNull(fields);
@ -318,7 +318,7 @@ public class TestIndexReader extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
writer.forceMerge(1);
writer.close();
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
doc2 = reader.document(reader.maxDoc() - 1);
fields = doc2.getFields("bin1");
assertNotNull(fields);
@ -370,7 +370,7 @@ public class TestIndexReader extends LuceneTestCase {
// Now open existing directory and test that reader closes all files
dir = newFSDirectory(dirFile);
IndexReader reader1 = IndexReader.open(dir, false);
IndexReader reader1 = IndexReader.open(dir);
reader1.close();
dir.close();
@ -388,7 +388,7 @@ public class TestIndexReader extends LuceneTestCase {
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
assertTrue(IndexReader.indexExists(dir));
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
long version = IndexReader.lastModified(dir);
if (i == 1) {
@ -403,7 +403,7 @@ public class TestIndexReader extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
assertTrue("old lastModified is " + version + "; new lastModified is " + IndexReader.lastModified(dir), version <= IndexReader.lastModified(dir));
reader.close();
dir.close();
@ -418,7 +418,7 @@ public class TestIndexReader extends LuceneTestCase {
assertTrue(IndexWriter.isLocked(dir)); // writer open, so dir is locked
writer.close();
assertTrue(IndexReader.indexExists(dir));
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
assertFalse(IndexWriter.isLocked(dir)); // reader only, no lock
long version = IndexReader.getCurrentVersion(dir);
reader.close();
@ -427,7 +427,7 @@ public class TestIndexReader extends LuceneTestCase {
writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
addDocumentWithFields(writer);
writer.close();
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
assertTrue("old version is " + version + "; new version is " + IndexReader.getCurrentVersion(dir), version < IndexReader.getCurrentVersion(dir));
reader.close();
dir.close();
@ -437,7 +437,7 @@ public class TestIndexReader extends LuceneTestCase {
File dirFile = _TestUtil.getTempDir("deletetest");
Directory dir = newFSDirectory(dirFile);
try {
IndexReader.open(dir, false);
IndexReader.open(dir);
fail("expected FileNotFoundException");
} catch (FileNotFoundException e) {
// expected
@ -447,7 +447,7 @@ public class TestIndexReader extends LuceneTestCase {
// Make sure we still get a CorruptIndexException (not NPE):
try {
IndexReader.open(dir, false);
IndexReader.open(dir);
fail("expected FileNotFoundException");
} catch (FileNotFoundException e) {
// expected
@ -629,7 +629,7 @@ public class TestIndexReader extends LuceneTestCase {
SegmentInfos sis = new SegmentInfos();
sis.read(d);
IndexReader r = IndexReader.open(d, false);
IndexReader r = IndexReader.open(d);
IndexCommit c = r.getIndexCommit();
assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());
@ -686,7 +686,7 @@ public class TestIndexReader extends LuceneTestCase {
public void testNoDir() throws Throwable {
Directory dir = newFSDirectory(_TestUtil.getTempDir("doesnotexist"));
try {
IndexReader.open(dir, true);
IndexReader.open(dir);
fail("did not hit expected exception");
} catch (NoSuchDirectoryException nsde) {
// expected
@ -731,7 +731,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.close();
// Open reader
IndexReader r = getOnlySegmentReader(IndexReader.open(dir, false));
IndexReader r = getOnlySegmentReader(IndexReader.open(dir));
final int[] ints = FieldCache.DEFAULT.getInts(r, "number", false);
assertEquals(1, ints.length);
assertEquals(17, ints[0]);
@ -766,7 +766,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.commit();
// Open reader1
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
IndexReader r1 = getOnlySegmentReader(r);
final int[] ints = FieldCache.DEFAULT.getInts(r1, "number", false);
assertEquals(1, ints.length);
@ -800,7 +800,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.addDocument(doc);
writer.commit();
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
IndexReader r1 = getOnlySegmentReader(r);
assertEquals(36, r1.getUniqueTermCount());
writer.addDocument(doc);
@ -830,7 +830,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader r = IndexReader.open(dir, null, true, -1);
IndexReader r = IndexReader.open(dir, -1);
try {
r.docFreq(new Term("field", "f"));
fail("did not hit expected exception");
@ -875,7 +875,7 @@ public class TestIndexReader extends LuceneTestCase {
writer.commit();
Document doc = new Document();
writer.addDocument(doc);
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
assertTrue(r.isCurrent());
writer.addDocument(doc);
writer.prepareCommit();

View File

@ -182,7 +182,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.close();
// now open reader:
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals("should be one document", reader.numDocs(), 1);
// now open index for create:
@ -192,7 +192,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.close();
assertEquals("should be one document", reader.numDocs(), 1);
IndexReader reader2 = IndexReader.open(dir, true);
IndexReader reader2 = IndexReader.open(dir);
assertEquals("should be one document", reader2.numDocs(), 1);
reader.close();
reader2.close();
@ -227,7 +227,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.commit();
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(0, reader.maxDoc());
assertEquals(0, reader.numDocs());
reader.close();
@ -236,7 +236,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.commit();
writer.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(0, reader.maxDoc());
assertEquals(0, reader.numDocs());
reader.close();
@ -258,7 +258,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(100, reader.maxDoc());
assertEquals(100, reader.numDocs());
for(int j=0;j<100;j++) {
@ -452,7 +452,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
writer.close();
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(new Term("field", "aaa")), null, 1000).scoreDocs;
assertEquals(300, hits.length);
@ -484,7 +484,7 @@ public class TestIndexWriter extends LuceneTestCase {
Term searchTerm = new Term("field", "aaa");
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals(10, hits.length);
@ -507,14 +507,14 @@ public class TestIndexWriter extends LuceneTestCase {
writer.addDocument(doc);
}
writer.close();
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
searcher = new IndexSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals(27, hits.length);
searcher.close();
reader.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
reader.close();
dir.close();
@ -541,7 +541,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(1, reader.maxDoc());
assertEquals(1, reader.numDocs());
Term t = new Term("field", "a");
@ -586,7 +586,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
writer.close();
Term searchTerm = new Term("content", "aaa");
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("did not get right number of hits", 100, hits.length);
@ -643,7 +643,7 @@ public class TestIndexWriter extends LuceneTestCase {
}
writer.addDocument(new Document());
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(2, reader.numDocs());
reader.close();
dir.close();
@ -826,7 +826,7 @@ public class TestIndexWriter extends LuceneTestCase {
t1.join();
// Make sure reader can read
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
reader.close();
// Reopen
@ -854,7 +854,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
Term t = new Term("field", "x");
assertEquals(1, reader.docFreq(t));
reader.close();
@ -881,7 +881,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(newField("", "a b c", TextField.TYPE_UNSTORED));
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
IndexReader subreader = getOnlySegmentReader(reader);
TermsEnum te = subreader.fields().terms("").iterator(null);
assertEquals(new BytesRef("a"), te.next());
@ -902,7 +902,7 @@ public class TestIndexWriter extends LuceneTestCase {
doc.add(newField("", "c", StringField.TYPE_UNSTORED));
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
IndexReader subreader = getOnlySegmentReader(reader);
TermsEnum te = subreader.fields().terms("").iterator(null);
assertEquals(new BytesRef(""), te.next());
@ -956,7 +956,7 @@ public class TestIndexWriter extends LuceneTestCase {
assertTrue(w.afterWasCalled);
w.close();
IndexReader ir = IndexReader.open(dir, true);
IndexReader ir = IndexReader.open(dir);
assertEquals(0, ir.numDocs());
ir.close();
@ -990,7 +990,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
w.commit();
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
IndexSearcher s = new IndexSearcher(r);
PhraseQuery pq = new PhraseQuery();
pq.add(new Term("field", "a"));
@ -1039,7 +1039,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader ir = IndexReader.open(dir, true);
IndexReader ir = IndexReader.open(dir);
Document doc2 = ir.document(0);
IndexableField f2 = doc2.getField("binary");
b = f2.binaryValue().bytes;
@ -1068,7 +1068,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
Terms tpv = r.getTermVectors(0).terms("field");
TermsEnum termsEnum = tpv.iterator(null);
assertNotNull(termsEnum.next());
@ -1132,12 +1132,12 @@ public class TestIndexWriter extends LuceneTestCase {
writer2.addDocument(doc);
writer2.close();
IndexReader r1 = IndexReader.open(dir2, true);
IndexReader r1 = IndexReader.open(dir2);
IndexReader r2 = (IndexReader) r1.clone();
writer.addIndexes(r1, r2);
writer.close();
IndexReader r3 = IndexReader.open(dir, true);
IndexReader r3 = IndexReader.open(dir);
assertEquals(5, r3.numDocs());
r3.close();
@ -1182,7 +1182,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.close();
w = null;
_TestUtil.checkIndex(dir);
IndexReader.open(dir, true).close();
IndexReader.open(dir).close();
// Strangely, if we interrupt a thread before
// all classes are loaded, the class loader
@ -1232,7 +1232,7 @@ public class TestIndexWriter extends LuceneTestCase {
e.printStackTrace(System.out);
}
try {
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
//System.out.println("doc count=" + r.numDocs());
r.close();
} catch (Exception e) {
@ -1318,7 +1318,7 @@ public class TestIndexWriter extends LuceneTestCase {
w.forceMerge(1); // force segment merge.
w.close();
IndexReader ir = IndexReader.open(dir, true);
IndexReader ir = IndexReader.open(dir);
Document doc2 = ir.document(0);
IndexableField f3 = doc2.getField("binary");
b = f3.binaryValue().bytes;

View File

@ -51,21 +51,21 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.close();
Term searchTerm = new Term("content", "aaa");
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("first number of hits", 14, hits.length);
searcher.close();
reader.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
for(int i=0;i<3;i++) {
for(int j=0;j<11;j++) {
TestIndexWriter.addDoc(writer);
}
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
searcher = new IndexSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
@ -78,7 +78,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.close();
assertFalse("reader should not be current now", reader.isCurrent());
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
searcher = new IndexSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("reader did not see changes after writer was closed", 47, hits.length);
@ -105,7 +105,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.close();
Term searchTerm = new Term("content", "aaa");
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
IndexSearcher searcher = new IndexSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("first number of hits", 14, hits.length);
@ -120,7 +120,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
// Delete all docs:
writer.deleteDocuments(searchTerm);
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
searcher = new IndexSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
@ -132,7 +132,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(dir, "unreferenced files remain after rollback()");
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
searcher = new IndexSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("saw changes after writer.abort", 14, hits.length);
@ -152,7 +152,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for(int j=0;j<17;j++) {
TestIndexWriter.addDoc(writer);
}
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
searcher = new IndexSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
@ -161,7 +161,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
writer.close();
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
searcher = new IndexSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
assertEquals("didn't see changes after close", 218, hits.length);
@ -243,7 +243,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader.open(dir, true).close();
IndexReader.open(dir).close();
long endDiskUsage = dir.getMaxUsedSizeInBytes();
@ -287,7 +287,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.forceMerge(1);
// Open a reader before closing (commiting) the writer:
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
// Reader should see index as multi-seg at this
// point:
@ -299,7 +299,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(dir, "aborted writer after forceMerge");
// Open a reader after aborting writer:
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
// Reader should still see index as multi-segment
assertTrue("Reader incorrectly sees one segment", reader.getSequentialSubReaders().length > 1);
@ -318,7 +318,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
TestIndexWriter.assertNoUnreferencedFiles(dir, "aborted writer after forceMerge");
// Open a reader after aborting writer:
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
// Reader should see index as one segment
assertEquals("Reader incorrectly sees more than one segment", 1, reader.getSequentialSubReaders().length);
@ -398,7 +398,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 23; i++)
TestIndexWriter.addDoc(writer);
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
writer.commit();
IndexReader reader2 = IndexReader.openIfChanged(reader);
@ -411,12 +411,12 @@ public class TestIndexWriterCommit extends LuceneTestCase {
TestIndexWriter.addDoc(writer);
assertEquals(23, reader2.numDocs());
reader2.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(23, reader.numDocs());
reader.close();
writer.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(40, reader.numDocs());
reader.close();
writer.close();
@ -509,12 +509,12 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 23; i++)
TestIndexWriter.addDoc(writer);
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
writer.prepareCommit();
IndexReader reader2 = IndexReader.open(dir, true);
IndexReader reader2 = IndexReader.open(dir);
assertEquals(0, reader2.numDocs());
writer.commit();
@ -532,18 +532,18 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals(23, reader3.numDocs());
reader3.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(23, reader.numDocs());
reader.close();
writer.prepareCommit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(23, reader.numDocs());
reader.close();
writer.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(40, reader.numDocs());
reader.close();
writer.close();
@ -566,12 +566,12 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 23; i++)
TestIndexWriter.addDoc(writer);
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
writer.prepareCommit();
IndexReader reader2 = IndexReader.open(dir, true);
IndexReader reader2 = IndexReader.open(dir);
assertEquals(0, reader2.numDocs());
writer.rollback();
@ -587,18 +587,18 @@ public class TestIndexWriterCommit extends LuceneTestCase {
for (int i = 0; i < 17; i++)
TestIndexWriter.addDoc(writer);
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
reader.close();
writer.prepareCommit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
reader.close();
writer.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(17, reader.numDocs());
reader.close();
writer.close();
@ -614,7 +614,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.commit();
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
reader.close();
dir.close();
@ -630,7 +630,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals(0, IndexReader.getCommitUserData(dir).size());
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
// commit(Map) never called for this index
assertEquals(0, r.getCommitUserData().size());
r.close();
@ -645,7 +645,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
assertEquals("test1", IndexReader.getCommitUserData(dir).get("label"));
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
assertEquals("test1", r.getCommitUserData().get("label"));
r.close();

View File

@ -106,7 +106,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.commit();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -114,7 +114,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(0, reader.numDocs());
reader.close();
modifier.close();
@ -166,7 +166,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
assertEquals(0, modifier.getSegmentCount());
modifier.commit();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(1, reader.numDocs());
int hitCount = getHitCount(dir, new Term("id", String.valueOf(id)));
@ -204,7 +204,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.commit();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(5, reader.numDocs());
modifier.close();
reader.close();
@ -226,7 +226,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
modifier.commit();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -236,7 +236,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(5, reader.numDocs());
reader.close();
@ -246,7 +246,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
modifier.deleteDocuments(terms);
modifier.commit();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(2, reader.numDocs());
reader.close();
@ -269,7 +269,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
}
modifier.commit();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -280,7 +280,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.deleteAll();
// Delete all shouldn't be on disk yet
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -292,7 +292,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.commit();
// Validate there are no docs left
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(2, reader.numDocs());
reader.close();
@ -317,7 +317,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
addDoc(modifier, ++id, value);
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -329,7 +329,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.close();
// Validate that the docs are still there
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -372,7 +372,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
modifier.close();
// Validate that the docs are still there
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
assertEquals(7, reader.numDocs());
reader.close();
@ -567,7 +567,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
// changed (transactional semantics):
IndexReader newReader = null;
try {
newReader = IndexReader.open(dir, true);
newReader = IndexReader.open(dir);
}
catch (IOException e) {
e.printStackTrace();

View File

@ -254,7 +254,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// Confirm that when doc hits exception partway through tokenization, it's deleted:
IndexReader r2 = IndexReader.open(dir, true);
IndexReader r2 = IndexReader.open(dir);
final int count = r2.docFreq(new Term("content4", "aaa"));
final int count2 = r2.docFreq(new Term("content4", "ddd"));
assertEquals(count, count2);
@ -300,7 +300,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
// Confirm that when doc hits exception partway through tokenization, it's deleted:
IndexReader r2 = IndexReader.open(dir, true);
IndexReader r2 = IndexReader.open(dir);
final int count = r2.docFreq(new Term("content4", "aaa"));
final int count2 = r2.docFreq(new Term("content4", "ddd"));
assertEquals(count, count2);
@ -494,7 +494,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
final Term t = new Term("content", "aa");
assertEquals(3, reader.docFreq(t));
@ -576,7 +576,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
}
assertTrue(hitError);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(198, reader.docFreq(new Term("content", "aa")));
reader.close();
dir.close();
@ -631,7 +631,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: open reader");
}
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
if (i == 0) {
int expected = 5;
assertEquals(expected, reader.docFreq(new Term("contents", "here")));
@ -660,7 +660,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
int expected = 19+(1-i)*2;
assertEquals(expected, reader.docFreq(new Term("contents", "here")));
assertEquals(expected, reader.maxDoc());
@ -746,7 +746,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.close();
}
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
int expected = (3+(1-i)*2)*NUM_THREAD*NUM_ITER;
assertEquals("i=" + i, expected, reader.docFreq(new Term("contents", "here")));
assertEquals(expected, reader.maxDoc());
@ -774,7 +774,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
expected += 17-NUM_THREAD*NUM_ITER;
assertEquals(expected, reader.docFreq(new Term("contents", "here")));
assertEquals(expected, reader.maxDoc());
@ -845,7 +845,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
failure.clearDoFail();
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(23, reader.numDocs());
reader.close();
dir.close();
@ -1058,7 +1058,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexReader reader = null;
try {
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
} catch (IOException e) {
e.printStackTrace(System.out);
fail("segmentInfos failed to retry fallback to correct segments_N file");
@ -1105,7 +1105,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexReader reader = null;
try {
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
fail("reader did not hit IOException on opening a corrupt index");
} catch (Exception e) {
}
@ -1154,7 +1154,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexReader reader = null;
try {
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
fail("reader did not hit IOException on opening a corrupt index");
} catch (Exception e) {
}
@ -1205,7 +1205,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
IndexReader reader = null;
try {
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
} catch (Exception e) {
fail("reader failed to open on a crashed index");
}

View File

@ -186,7 +186,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
if (0 == pass) {
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals(1, reader.getSequentialSubReaders().length);
reader.close();
} else {
@ -196,7 +196,7 @@ public class TestIndexWriterForceMerge extends LuceneTestCase {
writer.addDocument(doc);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertTrue(reader.getSequentialSubReaders().length > 1);
reader.close();

View File

@ -78,7 +78,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
private boolean verifyIndex(Directory directory, int startAt) throws IOException
{
boolean fail = false;
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
int max = reader.maxDoc();
for (int i = 0; i < max; i++)
@ -144,7 +144,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer.addDocument(document);
writer.close();
IndexReader ir = IndexReader.open(dir, false);
IndexReader ir = IndexReader.open(dir);
assertEquals(10, ir.maxDoc());
assertEquals(10, ir.numDocs());
ir.deleteDocument(0);
@ -158,7 +158,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer.forceMergeDeletes();
assertEquals(8, writer.numDocs());
writer.close();
ir = IndexReader.open(dir, true);
ir = IndexReader.open(dir);
assertEquals(8, ir.maxDoc());
assertEquals(8, ir.numDocs());
ir.close();
@ -195,7 +195,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer.addDocument(document);
writer.close();
IndexReader ir = IndexReader.open(dir, false);
IndexReader ir = IndexReader.open(dir);
assertEquals(98, ir.maxDoc());
assertEquals(98, ir.numDocs());
for(int i=0;i<98;i+=2)
@ -211,7 +211,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
assertEquals(49, writer.numDocs());
writer.forceMergeDeletes();
writer.close();
ir = IndexReader.open(dir, true);
ir = IndexReader.open(dir);
assertEquals(49, ir.maxDoc());
assertEquals(49, ir.numDocs());
ir.close();
@ -248,7 +248,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
writer.addDocument(document);
writer.close();
IndexReader ir = IndexReader.open(dir, false);
IndexReader ir = IndexReader.open(dir);
assertEquals(98, ir.maxDoc());
assertEquals(98, ir.numDocs());
for(int i=0;i<98;i+=2)
@ -263,7 +263,7 @@ public class TestIndexWriterMerging extends LuceneTestCase
);
writer.forceMergeDeletes(false);
writer.close();
ir = IndexReader.open(dir, true);
ir = IndexReader.open(dir);
assertEquals(49, ir.maxDoc());
assertEquals(49, ir.numDocs());
ir.close();

View File

@ -113,7 +113,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
assertNoUnreferencedFiles(dir, "after disk full during addDocument");
// Make sure reader can open the index:
IndexReader.open(dir, true).close();
IndexReader.open(dir).close();
}
dir.close();
@ -189,7 +189,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// Make sure starting index seems to be working properly:
Term searchTerm = new Term("content", "aaa");
IndexReader reader = IndexReader.open(startDir, true);
IndexReader reader = IndexReader.open(startDir);
assertEquals("first docFreq", 57, reader.docFreq(searchTerm));
IndexSearcher searcher = newSearcher(reader);
@ -306,7 +306,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
} else if (1 == method) {
IndexReader readers[] = new IndexReader[dirs.length];
for(int i=0;i<dirs.length;i++) {
readers[i] = IndexReader.open(dirs[i], true);
readers[i] = IndexReader.open(dirs[i]);
}
try {
writer.addIndexes(readers);
@ -355,7 +355,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
// failed, we see either all docs or no docs added
// (transactional semantics):
try {
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
} catch (IOException e) {
e.printStackTrace(System.out);
fail(testName + ": exception when creating IndexReader: " + e);

View File

@ -159,7 +159,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
writer.close();
assertTrue(r2.isCurrent());
IndexReader r3 = IndexReader.open(dir1, true);
IndexReader r3 = IndexReader.open(dir1);
assertTrue(r3.isCurrent());
assertTrue(r2.isCurrent());
assertEquals(0, count(new Term("id", id10), r3));
@ -384,7 +384,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
_TestUtil.checkIndex(mainDir);
IndexReader reader = IndexReader.open(mainDir, true);
IndexReader reader = IndexReader.open(mainDir);
assertEquals(addDirThreads.count.intValue(), reader.numDocs());
//assertEquals(100 + numDirs * (3 * numIter / 4) * addDirThreads.numThreads
// * addDirThreads.NUM_INIT_DOCS, reader.numDocs());
@ -420,7 +420,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
readers = new IndexReader[numDirs];
for (int i = 0; i < numDirs; i++)
readers[i] = IndexReader.open(addDir, false);
readers[i] = IndexReader.open(addDir);
}
void joinThreads() {
@ -892,7 +892,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
w.forceMergeDeletes();
w.close();
r.close();
r = IndexReader.open(dir, true);
r = IndexReader.open(dir);
assertEquals(1, r.numDocs());
assertFalse(r.hasDeletions());
r.close();

View File

@ -260,7 +260,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader ir = IndexReader.open(dir, true);
IndexReader ir = IndexReader.open(dir);
Document doc2 = ir.document(0);
for(int i=0;i<count;i++) {
assertEquals("field " + i + " was not indexed correctly", 1, ir.docFreq(new Term("f"+i, utf8Data[2*i+1])));

View File

@ -209,7 +209,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
}
// Quick test to make sure index is not corrupt:
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
DocsEnum tdocs = _TestUtil.docs(random, reader,
"field",
new BytesRef("aaa"),
@ -276,7 +276,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
}
if (success) {
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
final Bits delDocs = MultiFields.getLiveDocs(reader);
for(int j=0;j<reader.maxDoc();j++) {
if (delDocs == null || !delDocs.get(j)) {
@ -447,7 +447,7 @@ public class TestIndexWriterWithThreads extends LuceneTestCase {
assertFalse("Failed due to: " + thread1.failure, thread1.failed);
assertFalse("Failed due to: " + thread2.failure, thread2.failed);
// now verify that we have two documents in the index
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertEquals("IndexReader should have one document per thread running", 2,
reader.numDocs());

View File

@ -105,7 +105,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(directory, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(directory));
this.searcher = newSearcher(reader);
}
@ -153,7 +153,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
}
writer.close();
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
DocsAndPositionsEnum tp = MultiFields.getTermPositionsEnum(reader,
MultiFields.getLiveDocs(reader),

View File

@ -66,7 +66,7 @@ public class TestOmitNorms extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitNorms);
assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitNorms);
@ -120,7 +120,7 @@ public class TestOmitNorms extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f1").omitNorms);
assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitNorms);
@ -168,7 +168,7 @@ public class TestOmitNorms extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertTrue("OmitNorms field bit should not be set.", !fi.fieldInfo("f1").omitNorms);
assertTrue("OmitNorms field bit should be set.", fi.fieldInfo("f2").omitNorms);

View File

@ -153,7 +153,7 @@ public class TestOmitPositions extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
// docs + docs = docs
assertEquals(IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").indexOptions);

View File

@ -100,7 +100,7 @@ public class TestOmitTf extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").indexOptions);
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").indexOptions);
@ -152,7 +152,7 @@ public class TestOmitTf extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f1").indexOptions);
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").indexOptions);
@ -195,7 +195,7 @@ public class TestOmitTf extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertEquals("OmitTermFreqAndPositions field bit should not be set.", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, fi.fieldInfo("f1").indexOptions);
assertEquals("OmitTermFreqAndPositions field bit should be set.", IndexOptions.DOCS_ONLY, fi.fieldInfo("f2").indexOptions);

View File

@ -75,8 +75,8 @@ public class TestParallelReader extends LuceneTestCase {
Directory dir1 = getDir1(random);
Directory dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
pr.add(IndexReader.open(dir1));
pr.add(IndexReader.open(dir2));
Collection<String> fieldNames = pr.getFieldNames(IndexReader.FieldOption.ALL);
assertEquals(4, fieldNames.size());
assertTrue(fieldNames.contains("f1"));
@ -102,8 +102,8 @@ public class TestParallelReader extends LuceneTestCase {
w2.close();
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
IndexReader ir = IndexReader.open(dir2, false);
pr.add(IndexReader.open(dir1));
IndexReader ir = IndexReader.open(dir2);
try {
pr.add(ir);
fail("didn't get exptected exception: indexes don't have same number of documents");
@ -149,7 +149,7 @@ public class TestParallelReader extends LuceneTestCase {
w.addDocument(d2);
w.close();
IndexReader ir = IndexReader.open(dir, false);
IndexReader ir = IndexReader.open(dir);
return newSearcher(ir);
}
@ -158,8 +158,8 @@ public class TestParallelReader extends LuceneTestCase {
dir1 = getDir1(random);
dir2 = getDir2(random);
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(dir1, false));
pr.add(IndexReader.open(dir2, false));
pr.add(IndexReader.open(dir1));
pr.add(IndexReader.open(dir2));
return newSearcher(pr);
}

View File

@ -52,8 +52,8 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));
pr.add(IndexReader.open(rd1));
pr.add(IndexReader.open(rd2));
// When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum)
iwOut.addIndexes(pr);
@ -84,7 +84,7 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
iw.addDocument(doc);
iw.close();
IndexReader ir = IndexReader.open(rd1,false);
IndexReader ir = IndexReader.open(rd1);
ir.deleteDocument(0);
ir.close();
@ -105,8 +105,8 @@ public class TestParallelReaderEmptyIndex extends LuceneTestCase {
IndexWriter iwOut = new IndexWriter(rdOut, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));
pr.add(IndexReader.open(rd1));
pr.add(IndexReader.open(rd2));
// When unpatched, Lucene crashes here with an ArrayIndexOutOfBoundsException (caused by TermVectorsWriter)
iwOut.addIndexes(pr);

View File

@ -58,8 +58,8 @@ public class TestParallelTermEnum extends LuceneTestCase {
iw2.close();
this.ir1 = IndexReader.open(rd1, true);
this.ir2 = IndexReader.open(rd2, true);
this.ir1 = IndexReader.open(rd1);
this.ir2 = IndexReader.open(rd2);
}
@Override

View File

@ -111,7 +111,7 @@ public class TestPayloads extends LuceneTestCase {
// flush
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(ram));
FieldInfos fi = reader.fieldInfos();
assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").storePayloads);
assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").storePayloads);
@ -138,7 +138,7 @@ public class TestPayloads extends LuceneTestCase {
// flush
writer.close();
reader = getOnlySegmentReader(IndexReader.open(ram, false));
reader = getOnlySegmentReader(IndexReader.open(ram));
fi = reader.fieldInfos();
assertFalse("Payload field bit should not be set.", fi.fieldInfo("f1").storePayloads);
assertTrue("Payload field bit should be set.", fi.fieldInfo("f2").storePayloads);
@ -213,7 +213,7 @@ public class TestPayloads extends LuceneTestCase {
* Verify the index
* first we test if all payloads are stored correctly
*/
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
byte[] verifyPayloadData = new byte[payloadDataLength];
offset = 0;
@ -326,7 +326,7 @@ public class TestPayloads extends LuceneTestCase {
// flush
writer.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
tp = MultiFields.getTermPositionsEnum(reader,
MultiFields.getLiveDocs(reader),
fieldName,
@ -526,7 +526,7 @@ public class TestPayloads extends LuceneTestCase {
ingesters[i].join();
}
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
TermsEnum terms = MultiFields.getFields(reader).terms(field).iterator(null);
Bits liveDocs = MultiFields.getLiveDocs(reader);
DocsAndPositionsEnum tp = null;

View File

@ -48,7 +48,7 @@ public class TestRollback extends LuceneTestCase {
}
w.rollback();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
assertEquals("index should contain same number of docs post rollback", 5, r.numDocs());
r.close();
dir.close();

View File

@ -130,7 +130,7 @@ public class TestSegmentTermDocs extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, null, true, indexDivisor);
IndexReader reader = IndexReader.open(dir, indexDivisor);
DocsEnum tdocs = _TestUtil.docs(random, reader,
ta.field(),

View File

@ -78,7 +78,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setCodec(_TestUtil.alwaysPostingsFormat(new Lucene40PostingsFormat())));
addDoc(writer, "aaa bbb");
writer.close();
SegmentReader reader = getOnlySegmentReader(IndexReader.open(dir, false));
SegmentReader reader = getOnlySegmentReader(IndexReader.open(dir));
TermsEnum terms = reader.fields().terms("content").iterator(null);
assertNotNull(terms.next());
assertEquals("aaa", terms.term().utf8ToString());
@ -102,7 +102,7 @@ public class TestSegmentTermEnum extends LuceneTestCase {
private void verifyDocFreq()
throws IOException
{
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
TermsEnum termEnum = MultiFields.getTerms(reader, "content").iterator(null);
// create enumeration of all terms

View File

@ -290,7 +290,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();
// delete the last document, so that the last segment is merged.
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
r.deleteDocument(r.numDocs() - 1);
r.close();
@ -348,7 +348,7 @@ public class TestSizeBoundedForceMerge extends LuceneTestCase {
writer.close();
// delete the last document
IndexReader r = IndexReader.open(dir, false);
IndexReader r = IndexReader.open(dir);
r.deleteDocument(r.numDocs() - 1);
r.close();

View File

@ -54,7 +54,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
}
protected void checkMaxDoc(IndexCommit commit, int expectedMaxDoc) throws Exception {
IndexReader reader = IndexReader.open(commit, true);
IndexReader reader = IndexReader.open(commit);
try {
assertEquals(expectedMaxDoc, reader.maxDoc());
} finally {
@ -245,7 +245,7 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase {
assertSnapshotExists(dir, sdp, numSnapshots);
// open a reader on a snapshot - should succeed.
IndexReader.open(sdp.getSnapshot("snapshot0"), true).close();
IndexReader.open(sdp.getSnapshot("snapshot0")).close();
// open a new IndexWriter w/ no snapshots to keep and assert that all snapshots are gone.
sdp = getDeletionPolicy();

View File

@ -103,7 +103,7 @@ public class TestStressIndexing extends LuceneTestCase {
@Override
public void doWork() throws Throwable {
for (int i=0; i<100; i++) {
IndexReader ir = IndexReader.open(directory, true);
IndexReader ir = IndexReader.open(directory);
IndexSearcher is = new IndexSearcher(ir);
is.close();
ir.close();

View File

@ -272,8 +272,8 @@ public class TestStressIndexing2 extends LuceneTestCase {
}
public static void verifyEquals(Directory dir1, Directory dir2, String idField) throws Throwable {
IndexReader r1 = IndexReader.open(dir1, true);
IndexReader r2 = IndexReader.open(dir2, true);
IndexReader r1 = IndexReader.open(dir1);
IndexReader r2 = IndexReader.open(dir2);
verifyEquals(r1, r2, idField);
r1.close();
r2.close();

View File

@ -59,7 +59,7 @@ public class TestSumDocFreq extends LuceneTestCase {
ir.close();
/* nocommit: fix this to use IW to delete documents
ir = IndexReader.open(dir, false);
ir = IndexReader.open(dir);
int numDeletions = atLeast(20);
for (int i = 0; i < numDeletions; i++) {
ir.deleteDocument(random.nextInt(ir.maxDoc()));
@ -70,7 +70,7 @@ public class TestSumDocFreq extends LuceneTestCase {
w.forceMerge(1);
w.close();
ir = IndexReader.open(dir, true);
ir = IndexReader.open(dir);
assertSumDocFreq(ir);
ir.close();
*/

View File

@ -59,7 +59,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
Terms vector = r.getTermVectors(0).terms("field");
assertNotNull(vector);
TermsEnum termsEnum = vector.iterator(null);
@ -119,7 +119,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -156,7 +156,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -197,7 +197,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -235,7 +235,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -274,7 +274,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -325,7 +325,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -372,7 +372,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
w.addDocument(doc);
w.close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
TermsEnum termsEnum = r.getTermVectors(0).terms("field").iterator(null);
assertNotNull(termsEnum.next());
DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
@ -433,7 +433,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
for(int i=0;i<reader.numDocs();i++) {
reader.document(i);
reader.getTermVectors(i);
@ -487,7 +487,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
assertNull(reader.getTermVectors(0));
assertNull(reader.getTermVectors(1));
assertNotNull(reader.getTermVectors(2));
@ -532,7 +532,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
writer.forceMerge(1);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
for(int i=0;i<10;i++) {
reader.getTermVectors(i);
reader.document(i);

View File

@ -112,7 +112,7 @@ public class TestTermdocPerf extends LuceneTestCase {
long end = System.currentTimeMillis();
if (VERBOSE) System.out.println("milliseconds for creation of " + ndocs + " docs = " + (end-start));
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
TermsEnum tenum = MultiFields.getTerms(reader, "foo").iterator(null);

View File

@ -123,7 +123,7 @@ public class TestThreadedForceMerge extends LuceneTestCase {
TEST_VERSION_CURRENT, ANALYZER).setOpenMode(
OpenMode.APPEND).setMaxBufferedDocs(2));
IndexReader reader = IndexReader.open(directory, true);
IndexReader reader = IndexReader.open(directory);
assertEquals("reader=" + reader, 1, reader.getSequentialSubReaders().length);
assertEquals(expectedDocCount, reader.numDocs());
reader.close();

View File

@ -86,7 +86,7 @@ public class TestTransactionRollback extends LuceneTestCase {
}
private void checkExpecteds(BitSet expecteds) throws Exception {
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
//Perhaps not the most efficient approach but meets our
//needs here.
@ -204,7 +204,7 @@ public class TestTransactionRollback extends LuceneTestCase {
// should not work:
new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))
.setIndexDeletionPolicy(new DeleteLastCommitPolicy())).close();
IndexReader r = IndexReader.open(dir, true);
IndexReader r = IndexReader.open(dir);
assertEquals(100, r.numDocs());
r.close();
}

View File

@ -183,8 +183,8 @@ public class TestTransactions extends LuceneTestCase {
public void doWork() throws Throwable {
IndexReader r1, r2;
synchronized(lock) {
r1 = IndexReader.open(dir1, true);
r2 = IndexReader.open(dir2, true);
r1 = IndexReader.open(dir1);
r2 = IndexReader.open(dir2);
}
if (r1.numDocs() != r2.numDocs())
throw new RuntimeException("doc counts differ: r1=" + r1.numDocs() + " r2=" + r2.numDocs());

View File

@ -190,7 +190,7 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
if (VERBOSE) {
System.out.println("\nTEST: assertQuery " + t);
}
IndexReader reader = IndexReader.open(dir, null, true, 1);
IndexReader reader = IndexReader.open(dir, 1);
IndexSearcher searcher = newSearcher(reader);
TopDocs search = searcher.search(new TermQuery(t), num + 10);
assertEquals(num, search.totalHits);

View File

@ -87,7 +87,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
writer.close(true);
IndexReader reader = IndexReader.open(dir, null, true, 1);
IndexReader reader = IndexReader.open(dir, 1);
assertEquals(1, reader.getSequentialSubReaders().length);
IndexSearcher searcher = new IndexSearcher(reader);

View File

@ -40,7 +40,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
writer.close();
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir, true));
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir));
AtomicReaderContext context = (AtomicReaderContext) reader.getTopReaderContext();
MockFilter filter = new MockFilter();
CachingWrapperFilter cacher = new CachingWrapperFilter(filter);
@ -66,7 +66,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
writer.close();
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir, true));
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir));
AtomicReaderContext context = (AtomicReaderContext) reader.getTopReaderContext();
final Filter filter = new Filter() {
@ -89,7 +89,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
RandomIndexWriter writer = new RandomIndexWriter(random, dir);
writer.close();
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir, true));
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir));
AtomicReaderContext context = (AtomicReaderContext) reader.getTopReaderContext();
final Filter filter = new Filter() {
@ -134,7 +134,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
writer.addDocument(new Document());
writer.close();
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir, true));
IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(dir));
// not cacheable:
assertDocIdSetCacheable(reader, new QueryWrapperFilter(new TermQuery(new Term("test","value"))), false);

View File

@ -544,7 +544,7 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
writer.deleteDocuments(new Term("id","0"));
writer.close();
IndexReader reader = IndexReader.open(dir, true);
IndexReader reader = IndexReader.open(dir);
IndexSearcher search = newSearcher(reader);
assertTrue(reader.hasDeletions());

View File

@ -46,7 +46,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
addDoc("three four", iw, 300f);
iw.close();
IndexReader ir = IndexReader.open(dir, false);
IndexReader ir = IndexReader.open(dir);
IndexSearcher is = newSearcher(ir);
ScoreDoc[] hits;

View File

@ -59,16 +59,16 @@ public class TestMultiTermQueryRewrites extends LuceneTestCase {
writer.forceMerge(1); swriter1.forceMerge(1); swriter2.forceMerge(1);
writer.close(); swriter1.close(); swriter2.close();
reader = IndexReader.open(dir, true);
reader = IndexReader.open(dir);
searcher = newSearcher(reader);
multiReader = new MultiReader(new IndexReader[] {
IndexReader.open(sdir1, true), IndexReader.open(sdir2, true)
IndexReader.open(sdir1), IndexReader.open(sdir2)
}, true);
multiSearcher = newSearcher(multiReader);
multiReaderDupls = new MultiReader(new IndexReader[] {
IndexReader.open(sdir1, true), IndexReader.open(dir, true)
IndexReader.open(sdir1), IndexReader.open(dir)
}, true);
multiSearcherDupls = newSearcher(multiReaderDupls);
}

View File

@ -67,7 +67,7 @@ public class TestMultiThreadTermVectors extends LuceneTestCase {
IndexReader reader = null;
try {
reader = IndexReader.open(directory, true);
reader = IndexReader.open(directory);
for(int i = 1; i <= numThreads; i++)
testTermPositionVectors(reader, i);

View File

@ -491,7 +491,7 @@ public class TestSpans extends LuceneTestCase {
writer.close();
// Get searcher
final IndexReader reader = IndexReader.open(dir, true);
final IndexReader reader = IndexReader.open(dir);
final IndexSearcher searcher = newSearcher(reader);
// Control (make sure docs indexed)

View File

@ -76,7 +76,7 @@ public class TestSpansAdvanced2 extends TestSpansAdvanced {
* @throws Exception
*/
public void testVerifyIndex() throws Exception {
final IndexReader reader = IndexReader.open(mDirectory, true);
final IndexReader reader = IndexReader.open(mDirectory);
assertEquals(8, reader.numDocs());
reader.close();
}

View File

@ -261,7 +261,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
dir.allIndexInputs.clear();
IndexReader reader = IndexReader.open(dir, false);
IndexReader reader = IndexReader.open(dir);
Term aaa = new Term("content", "aaa");
Term bbb = new Term("content", "bbb");
Term ccc = new Term("content", "ccc");

View File

@ -94,7 +94,7 @@ public class TestFileSwitchDirectory extends LuceneTestCase {
public void testNoDir() throws Throwable {
Directory dir = newFSSwitchDirectory(Collections.<String>emptySet());
try {
IndexReader.open(dir, true);
IndexReader.open(dir);
fail("did not hit expected exception");
} catch (NoSuchDirectoryException nsde) {
// expected

View File

@ -345,7 +345,7 @@ public class TestLockFactory extends LuceneTestCase {
Query query = new TermQuery(new Term("content", "aaa"));
for(int i=0;i<this.numIteration;i++) {
try{
reader = IndexReader.open(dir, false);
reader = IndexReader.open(dir);
searcher = new IndexSearcher(reader);
} catch (Exception e) {
hitException = true;

View File

@ -123,7 +123,7 @@ public class TestNRTCachingDirectory extends LuceneTestCase {
public void testNoDir() throws Throwable {
Directory dir = new NRTCachingDirectory(newFSDirectory(_TestUtil.getTempDir("doesnotexist")), 2.0, 25.0);
try {
IndexReader.open(dir, true);
IndexReader.open(dir);
fail("did not hit expected exception");
} catch (NoSuchDirectoryException nsde) {
// expected

View File

@ -76,7 +76,7 @@ public class TestRAMDirectory extends LuceneTestCase {
assertEquals(ramDir.sizeInBytes(), ramDir.getRecomputedSizeInBytes());
// open reader to test document count
IndexReader reader = IndexReader.open(ramDir, true);
IndexReader reader = IndexReader.open(ramDir);
assertEquals(docsToAdd, reader.numDocs());
// open search zo check if all doc's are there

View File

@ -68,8 +68,8 @@ public class TestFieldCacheSanityChecker extends LuceneTestCase {
}
wA.close();
wB.close();
readerA = IndexReader.open(dirA, true);
readerB = IndexReader.open(dirB, true);
readerA = IndexReader.open(dirA);
readerB = IndexReader.open(dirB);
readerX = new MultiReader(readerA, readerB);
}

View File

@ -35,6 +35,7 @@ public class StandardIndexReaderFactory extends IndexReaderFactory {
@Override
public IndexReader newReader(Directory indexDir, boolean readOnly)
throws IOException {
return IndexReader.open(indexDir, null, readOnly, termInfosIndexDivisor);
assert readOnly; // nocommit: readOnly is ignored - remove
return IndexReader.open(indexDir, termInfosIndexDivisor);
}
}

View File

@ -214,7 +214,7 @@ public class CoreAdminHandler extends RequestHandlerBase {
Directory dir = dirFactory.get(dirNames[i], core.getSolrConfig().mainIndexConfig.lockType);
dirsToBeReleased[i] = dir;
// TODO: why doesn't this use the IR factory? what is going on here?
readersToBeClosed[i] = IndexReader.open(dir, true);
readersToBeClosed[i] = IndexReader.open(dir);
}
}

View File

@ -111,7 +111,7 @@ public class FileBasedSpellChecker extends AbstractLuceneSpellChecker {
writer.forceMerge(1);
writer.close();
dictionary = new HighFrequencyDictionary(IndexReader.open(ramDir, true),
dictionary = new HighFrequencyDictionary(IndexReader.open(ramDir),
WORD_FIELD_NAME, 0.0f);
} else {
// check if character encoding is defined

View File

@ -64,7 +64,7 @@ public class IndexBasedSpellChecker extends AbstractLuceneSpellChecker {
if (sourceLocation != null) {
try {
FSDirectory luceneIndexDir = FSDirectory.open(new File(sourceLocation));
this.reader = IndexReader.open(luceneIndexDir, true);
this.reader = IndexReader.open(luceneIndexDir);
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@ -63,8 +63,9 @@ public class AlternateDirectoryTest extends SolrTestCaseJ4 {
@Override
public IndexReader newReader(Directory indexDir, boolean readOnly)
throws IOException {
assert readOnly; // nocommit: readOnly is ignored - remove
TestIndexReaderFactory.newReaderCalled = true;
return IndexReader.open(indexDir, readOnly);
return IndexReader.open(indexDir);
}
}