LUCENE-3606: fix more tests
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene3606@1210308 13f79535-47bb-0310-9956-ffa450edef68
parent b8235a5bb2
commit 3843ac5b8b
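Every hunk in this change follows the same pattern: the old test code opened readers with the two-argument IndexReader.open(Directory, boolean readOnly), and on the lucene3606 branch (LUCENE-3606 makes IndexReader read-only, so the flag no longer means anything) the calls are reduced to the single-argument IndexReader.open(Directory). A minimal sketch of the before/after shape, assuming a Lucene build from this branch; the class and method names below are illustrative and not part of the patch:

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;

class OpenReaderSketch {
  // Before this branch:
  //   IndexReader reader = IndexReader.open(dir, true);  // explicit readOnly flag
  // After LUCENE-3606 readers no longer expose write operations,
  // so the flag is redundant and the overload goes away:
  static IndexReader openReader(Directory dir) throws IOException {
    return IndexReader.open(dir);
  }
}

Code that previously relied on readOnly=false (for example to delete documents through the reader) would presumably have to move those operations to IndexWriter instead.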
@@ -270,7 +270,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
     writer.addDocument(doc);
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
+    IndexReader reader = IndexReader.open(dir);
 
     // Make sure all terms < max size were indexed
     assertEquals(2, reader.docFreq(new Term("content", "abc")));

@@ -303,7 +303,7 @@ public class TestClassicAnalyzer extends BaseTokenStreamTestCase {
     writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, sa));
     writer.addDocument(doc);
     writer.close();
-    reader = IndexReader.open(dir, true);
+    reader = IndexReader.open(dir);
     assertEquals(1, reader.docFreq(new Term("content", bigTerm)));
     reader.close();
 
@@ -95,7 +95,7 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
     writer.addDocument(doc);
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
+    IndexReader reader = IndexReader.open(dir);
     DocsEnum td = _TestUtil.docs(random,
                                  reader,
                                  "partnum",
@@ -61,7 +61,7 @@ public class TestLimitTokenCountAnalyzer extends BaseTokenStreamTestCase {
     writer.addDocument(doc);
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
+    IndexReader reader = IndexReader.open(dir);
     Term t = new Term("field", "x");
     assertEquals(1, reader.docFreq(t));
     reader.close();
@@ -53,7 +53,7 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
       writer.addDocument(doc);
     }
     writer.close();
-    reader = IndexReader.open(dir, true);
+    reader = IndexReader.open(dir);
   }
 
   @Override
@@ -104,7 +104,7 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
     w.addDocument(doc);
     w.close();
 
-    IndexReader r = IndexReader.open(dir, true);
+    IndexReader r = IndexReader.open(dir);
     Terms vector = r.getTermVectors(0).terms("field");
     assertEquals(1, vector.getUniqueTermCount());
     TermsEnum termsEnum = vector.iterator(null);
@@ -84,7 +84,7 @@ public abstract class ReadTask extends PerfTask {
     if (searcher == null) {
       // open our own reader
       Directory dir = getRunData().getDirectory();
-      reader = IndexReader.open(dir, true);
+      reader = IndexReader.open(dir);
       searcher = new IndexSearcher(reader);
       closeSearcher = true;
     } else {
@@ -86,7 +86,7 @@ public class QualityQueriesFinder {
 
   private String [] bestTerms(String field,int numTerms) throws IOException {
     PriorityQueue<TermDf> pq = new TermsDfQueue(numTerms);
-    IndexReader ir = IndexReader.open(dir, true);
+    IndexReader ir = IndexReader.open(dir);
     try {
       int threshold = ir.maxDoc() / 10; // ignore words too common.
       Terms terms = MultiFields.getTerms(ir, field);
@@ -102,7 +102,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random))
         .setOpenMode(OpenMode.APPEND));
     iw.close();
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
     ir.close();
   }

@@ -188,7 +188,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     // now we should be able to open the index for write.
     IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
     ir.close();
   }

@@ -227,7 +227,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     // now we should be able to open the index for write.
     IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
     ir.close();
   }

@@ -300,7 +300,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     // now we should be able to open the index for write.
     IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
     iw.close();
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
     ir.close();
   }

@@ -331,7 +331,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     // 3. execute the algorithm (required in every "logic" test)
     Benchmark benchmark = execBenchmark(algLines);
 
-    IndexReader r = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader r = IndexReader.open(benchmark.getRunData().getDirectory());
     DocTermsIndex idx = FieldCache.DEFAULT.getTermsIndex(r, "country");
     final int maxDoc = r.maxDoc();
     assertEquals(1000, maxDoc);

@@ -367,7 +367,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -432,7 +432,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
         .setOpenMode(OpenMode.APPEND));
     iw.close();
 
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals(numLines + " lines were created but " + ir.numDocs() + " docs are in the index", numLines, ir.numDocs());
     ir.close();
 

@@ -476,7 +476,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     }
 
     // Separately count how many tokens are actually in the index:
-    IndexReader reader = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader reader = IndexReader.open(benchmark.getRunData().getDirectory());
     assertEquals(NUM_DOCS, reader.numDocs());
 
     int totalTokenCount2 = 0;

@@ -535,7 +535,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 2 * 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -572,7 +572,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -609,7 +609,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -651,7 +651,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 16; // first 20 reuters docs, minus 20%
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -690,7 +690,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     benchmark.getRunData().getIndexWriter().close();
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -736,7 +736,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     benchmark.getRunData().getIndexWriter().close();
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();

@@ -780,7 +780,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     assertFalse(((LogMergePolicy) writer.getConfig().getMergePolicy()).getUseCompoundFile());
     writer.close();
     Directory dir = benchmark.getRunData().getDirectory();
-    IndexReader reader = IndexReader.open(dir, true);
+    IndexReader reader = IndexReader.open(dir);
     Fields tfv = reader.getTermVectors(0);
     assertNotNull(tfv);
     assertTrue(tfv.getUniqueFieldCount() > 0);

@@ -856,7 +856,7 @@ public class TestPerfTasksLogic extends BenchmarkTestCase {
     Benchmark benchmark = execBenchmark(algLines);
 
     // 3. test number of docs in the index
-    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
+    IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory());
     int ndocsExpected = 20; // first 20 reuters docs.
     assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
     ir.close();
@@ -76,7 +76,7 @@ public class TestParser extends LuceneTestCase {
     }
     d.close();
     writer.close();
-    reader = IndexReader.open(dir, true);
+    reader = IndexReader.open(dir);
     searcher = newSearcher(reader);
 
   }
@@ -63,7 +63,7 @@ public class TestNumericRangeFilterBuilder extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(ramDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
     writer.commit();
     try {
-      IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(ramDir, true));
+      IndexReader reader = new SlowMultiReaderWrapper(IndexReader.open(ramDir));
       try {
         assertNull(filter.getDocIdSet((AtomicReaderContext) reader.getTopReaderContext(), reader.getLiveDocs()));
       }
@@ -84,7 +84,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
 
   public void testFieldNonExistent() throws IOException {
     try {
-      indexReader = IndexReader.open(store, true);
+      indexReader = IndexReader.open(store);
 
       ld = new LuceneDictionary(indexReader, "nonexistent_field");
       it = ld.getWordsIterator();

@@ -98,7 +98,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
 
   public void testFieldAaa() throws IOException {
     try {
-      indexReader = IndexReader.open(store, true);
+      indexReader = IndexReader.open(store);
 
       ld = new LuceneDictionary(indexReader, "aaa");
       it = ld.getWordsIterator();

@@ -114,7 +114,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
 
   public void testFieldContents_1() throws IOException {
     try {
-      indexReader = IndexReader.open(store, true);
+      indexReader = IndexReader.open(store);
 
       ld = new LuceneDictionary(indexReader, "contents");
       it = ld.getWordsIterator();

@@ -144,7 +144,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
 
   public void testFieldContents_2() throws IOException {
     try {
-      indexReader = IndexReader.open(store, true);
+      indexReader = IndexReader.open(store);
 
       ld = new LuceneDictionary(indexReader, "contents");
       it = ld.getWordsIterator();

@@ -176,7 +176,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
 
   public void testFieldZzz() throws IOException {
     try {
-      indexReader = IndexReader.open(store, true);
+      indexReader = IndexReader.open(store);
 
       ld = new LuceneDictionary(indexReader, "zzz");
       it = ld.getWordsIterator();

@@ -194,7 +194,7 @@ public class TestLuceneDictionary extends LuceneTestCase {
   public void testSpellchecker() throws IOException {
     Directory dir = newDirectory();
     SpellChecker sc = new SpellChecker(dir);
-    indexReader = IndexReader.open(store, true);
+    indexReader = IndexReader.open(store);
     sc.indexDictionary(new LuceneDictionary(indexReader, "contents"), newIndexWriterConfig(TEST_VERSION_CURRENT, null), false);
     String[] suggestions = sc.suggestSimilar("Tam", 1);
     assertEquals(1, suggestions.length);
@@ -104,7 +104,7 @@ public class TestSpellChecker extends LuceneTestCase {
 
 
   public void testBuild() throws CorruptIndexException, IOException {
-    IndexReader r = IndexReader.open(userindex, true);
+    IndexReader r = IndexReader.open(userindex);
 
     spellChecker.clearIndex();
 

@@ -144,7 +144,7 @@ public class TestSpellChecker extends LuceneTestCase {
   }
 
   public void testComparator() throws Exception {
-    IndexReader r = IndexReader.open(userindex, true);
+    IndexReader r = IndexReader.open(userindex);
     Directory compIdx = newDirectory();
     SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparator());
     addwords(r, compareSP, "field3");

@@ -162,7 +162,7 @@ public class TestSpellChecker extends LuceneTestCase {
   }
 
   public void testBogusField() throws Exception {
-    IndexReader r = IndexReader.open(userindex, true);
+    IndexReader r = IndexReader.open(userindex);
     Directory compIdx = newDirectory();
     SpellChecker compareSP = new SpellCheckerMock(compIdx, new LevensteinDistance(), new SuggestWordFrequencyComparator());
     addwords(r, compareSP, "field3");

@@ -177,7 +177,7 @@ public class TestSpellChecker extends LuceneTestCase {
   }
 
   public void testSuggestModes() throws Exception {
-    IndexReader r = IndexReader.open(userindex, true);
+    IndexReader r = IndexReader.open(userindex);
     spellChecker.clearIndex();
     addwords(r, spellChecker, "field1");
 

@@ -337,7 +337,7 @@ public class TestSpellChecker extends LuceneTestCase {
   }
 
   private int numdoc() throws IOException {
-    IndexReader rs = IndexReader.open(spellindex, true);
+    IndexReader rs = IndexReader.open(spellindex);
     int num = rs.numDocs();
     assertTrue(num != 0);
     //System.out.println("num docs: " + num);

@@ -346,7 +346,7 @@ public class TestSpellChecker extends LuceneTestCase {
   }
 
   public void testClose() throws IOException {
-    IndexReader r = IndexReader.open(userindex, true);
+    IndexReader r = IndexReader.open(userindex);
     spellChecker.clearIndex();
     String field = "field1";
     addwords(r, spellChecker, "field1");

@@ -402,7 +402,7 @@ public class TestSpellChecker extends LuceneTestCase {
   */
  public void testConcurrentAccess() throws IOException, InterruptedException {
     assertEquals(1, searchers.size());
-    final IndexReader r = IndexReader.open(userindex, true);
+    final IndexReader r = IndexReader.open(userindex);
     spellChecker.clearIndex();
     assertEquals(2, searchers.size());
     addwords(r, spellChecker, "field1");