mirror of https://github.com/apache/lucene.git

commit 3899e18ca3 (parent 3c00042ca0)
LUCENE-3640: Remove IndexSearcher.close

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1213117 13f79535-47bb-0310-9956-ffa450edef68
@@ -222,6 +222,10 @@ Changes in backwards compatibility policy
   FilterIndexReader, overriding FilterIndexReader.norms(). To persist the
   changes on disk, copy the FilteredIndexReader to a new index using
   IndexWriter.addIndexes(). (Uwe Schindler, Robert Muir)

+* LUCENE-3640: Removed IndexSearcher.close(), because IndexSearcher no longer
+  takes a Directory and no longer "manages" IndexReaders, it is a no-op.
+  (Robert Muir)
+
 Changes in Runtime Behavior
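For users upgrading, the practical effect of this entry is that the searcher no longer participates in resource management at all. The following is a minimal sketch of the post-commit idiom, not code from this commit; the method name, the Directory variable, and the "body"/"united" values are hypothetical stand-ins:

```java
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;

public class SearchOnce {
  // Hypothetical helper illustrating the migration: close the reader, not the searcher.
  static int countHits(Directory dir) throws Exception {
    IndexReader reader = IndexReader.open(dir);
    try {
      // The searcher is a cheap wrapper; it owns nothing that needs closing.
      IndexSearcher searcher = new IndexSearcher(reader);
      TopDocs hits = searcher.search(new TermQuery(new Term("body", "united")), 10);
      // Before LUCENE-3640 a searcher.close() call would go here; it is now gone.
      return hits.totalHits;
    } finally {
      reader.close(); // the reader (and ultimately the Directory) still need closing
    }
  }
}
```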
@@ -131,7 +131,6 @@ public class SearchFiles {
         break;
       }
     }
-    searcher.close();
     reader.close();
   }
@@ -72,26 +72,21 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final PhraseQuery phraseQuery = new PhraseQuery();
       phraseQuery.add(new Term(FIELD, "fox"));
       phraseQuery.add(new Term(FIELD, "jumped"));
       phraseQuery.setSlop(0);
       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));

       final TokenStream tokenStream = TokenSources
           .getTokenStream(indexReader.getTermVector(
               0, FIELD), false);
       assertEquals(highlighter.getBestFragment(new TokenStreamConcurrent(),
           TEXT), highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -120,52 +115,48 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
           new SpanTermQuery(new Term(FIELD, "fox")),
           new SpanTermQuery(new Term(FIELD, "jumped")) }, 0, true);
       final FixedBitSet bitset = new FixedBitSet(indexReader.maxDoc());
       indexSearcher.search(phraseQuery, new Collector() {
         private int baseDoc;

         @Override
         public boolean acceptsDocsOutOfOrder() {
           return true;
         }

         @Override
         public void collect(int i) throws IOException {
           bitset.set(this.baseDoc + i);
         }

         @Override
         public void setNextReader(AtomicReaderContext context)
             throws IOException {
           this.baseDoc = context.docBase;
         }

         @Override
         public void setScorer(org.apache.lucene.search.Scorer scorer)
             throws IOException {
           // Do Nothing
         }
       });
       assertEquals(1, bitset.cardinality());
       final int maxDoc = indexReader.maxDoc();
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       for (int position = bitset.nextSetBit(0); position >= 0 && position < maxDoc-1; position = bitset
           .nextSetBit(position + 1)) {
         assertEquals(0, position);
         final TokenStream tokenStream = TokenSources.getTokenStream(
             indexReader.getTermVector(position,
                 FIELD), false);
         assertEquals(highlighter.getBestFragment(new TokenStreamConcurrent(),
             TEXT), highlighter.getBestFragment(tokenStream, TEXT));
       }
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
@@ -195,25 +186,21 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final PhraseQuery phraseQuery = new PhraseQuery();
       phraseQuery.add(new Term(FIELD, "did"));
       phraseQuery.add(new Term(FIELD, "jump"));
       phraseQuery.setSlop(0);
       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(0, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(indexReader.getTermVector(
               0, FIELD), false);
       assertEquals(
           highlighter.getBestFragment(new TokenStreamSparse(), TEXT),
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -241,23 +228,19 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final PhraseQuery phraseQuery = new PhraseQuery();
       phraseQuery.add(new Term(FIELD, "did"));
       phraseQuery.add(new Term(FIELD, "jump"));
       phraseQuery.setSlop(1);
       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       final TokenStream tokenStream = TokenSources.getTokenStream(
           indexReader.getTermVector(0, FIELD), true);
       assertEquals("the fox <B>did</B> not <B>jump</B>", highlighter
           .getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -285,25 +268,21 @@ public class HighlighterPhraseTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
           new SpanTermQuery(new Term(FIELD, "did")),
           new SpanTermQuery(new Term(FIELD, "jump")) }, 0, true);

       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(0, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(indexReader.getTermVector(
               0, FIELD), false);
       assertEquals(
           highlighter.getBestFragment(new TokenStreamSparse(), TEXT),
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
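Across these HighlighterPhraseTest hunks the same mechanical simplification repeats: the inner try/finally that existed only to call indexSearcher.close() collapses, leaving a single finally for the reader and directory. A hedged fragment of the resulting shape (FIELD, directory, and the elided query/assertion code are stand-ins from the tests above; newSearcher is the LuceneTestCase helper):

```java
final IndexReader indexReader = IndexReader.open(directory);
try {
  final IndexSearcher indexSearcher = newSearcher(indexReader);
  // ... build a query, search, highlight, assert ...
} finally {
  // only the reader and directory hold resources
  indexReader.close();
  directory.close();
}
```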
@@ -109,7 +109,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {

       if (VERBOSE) System.out.println(fragment);
     }
-    searcher.close();
   }

   public void testHighlightingWithDefaultField() throws Exception {

@@ -1271,7 +1270,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
       public void run() throws Exception {
         numHighlights = 0;
         // test to show how rewritten query can still be used
-        if (searcher != null) searcher.close();
         searcher = new IndexSearcher(reader);
         Analyzer analyzer = new MockAnalyzer(random, MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true);

@@ -1664,7 +1662,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
       if (VERBOSE) System.out.println("result:" + result);
       assertEquals("more <B>random</B> words for second field", result);
     }
-    searcher.close();
     reader.close();
   }

@@ -1703,7 +1700,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
   }

   public void doSearching(Query unReWrittenQuery) throws Exception {
-    if (searcher != null) searcher.close();
     searcher = new IndexSearcher(reader);
     // for any multi-term queries to work (prefix, wildcard, range,fuzzy etc)
     // you must use a rewritten query!

@@ -1769,7 +1765,6 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {

   @Override
   public void tearDown() throws Exception {
-    if (searcher != null) searcher.close();
     reader.close();
     dir.close();
     ramDir.close();
@@ -114,31 +114,27 @@ public class TokenSourcesTest extends LuceneTestCase {
       indexWriter.close();
     }
     final IndexReader indexReader = IndexReader.open(directory);
-    assertEquals(1, indexReader.numDocs());
-    final IndexSearcher indexSearcher = newSearcher(indexReader);
     try {
-      try {
+      assertEquals(1, indexReader.numDocs());
+      final IndexSearcher indexSearcher = newSearcher(indexReader);
       final DisjunctionMaxQuery query = new DisjunctionMaxQuery(1);
       query.add(new SpanTermQuery(new Term(FIELD, "{fox}")));
       query.add(new SpanTermQuery(new Term(FIELD, "fox")));
       // final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
       //   new SpanTermQuery(new Term(FIELD, "{fox}")),
       //   new SpanTermQuery(new Term(FIELD, "fox")) }, 0, true);

       TopDocs hits = indexSearcher.search(query, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(query));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(
               indexReader.getTermVector(0, FIELD),
               false);
       assertEquals("<B>the fox</B> did not jump",
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -166,28 +162,24 @@ public class TokenSourcesTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       final DisjunctionMaxQuery query = new DisjunctionMaxQuery(1);
       query.add(new SpanTermQuery(new Term(FIELD, "{fox}")));
       query.add(new SpanTermQuery(new Term(FIELD, "fox")));
       // final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
       //   new SpanTermQuery(new Term(FIELD, "{fox}")),
       //   new SpanTermQuery(new Term(FIELD, "fox")) }, 0, true);

       TopDocs hits = indexSearcher.search(query, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(query));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(
               indexReader.getTermVector(0, FIELD),
               false);
       assertEquals("<B>the fox</B> did not jump",
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -214,28 +206,24 @@ public class TokenSourcesTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       // final DisjunctionMaxQuery query = new DisjunctionMaxQuery(1);
       // query.add(new SpanTermQuery(new Term(FIELD, "{fox}")));
       // query.add(new SpanTermQuery(new Term(FIELD, "fox")));
       final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
           new SpanTermQuery(new Term(FIELD, "the")),
           new SpanTermQuery(new Term(FIELD, "fox"))}, 0, true);

       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(
               indexReader.getTermVector(0, FIELD),
               false);
       assertEquals("<B>the fox</B> did not jump",
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -263,28 +251,24 @@ public class TokenSourcesTest extends LuceneTestCase {
     try {
       assertEquals(1, indexReader.numDocs());
       final IndexSearcher indexSearcher = newSearcher(indexReader);
-      try {
       // final DisjunctionMaxQuery query = new DisjunctionMaxQuery(1);
       // query.add(new SpanTermQuery(new Term(FIELD, "the")));
       // query.add(new SpanTermQuery(new Term(FIELD, "fox")));
       final Query phraseQuery = new SpanNearQuery(new SpanQuery[] {
           new SpanTermQuery(new Term(FIELD, "the")),
           new SpanTermQuery(new Term(FIELD, "fox"))}, 0, true);

       TopDocs hits = indexSearcher.search(phraseQuery, 1);
       assertEquals(1, hits.totalHits);
       final Highlighter highlighter = new Highlighter(
           new SimpleHTMLFormatter(), new SimpleHTMLEncoder(),
           new QueryScorer(phraseQuery));
       final TokenStream tokenStream = TokenSources
           .getTokenStream(
               indexReader.getTermVector(0, FIELD),
               false);
       assertEquals("<B>the fox</B> did not jump",
           highlighter.getBestFragment(tokenStream, TEXT));
-      } finally {
-        indexSearcher.close();
-      }
     } finally {
       indexReader.close();
       directory.close();
@@ -143,9 +143,7 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
       TopDocs memDocs = mem.search(qp.parse(query), 1);
       assertEquals(ramDocs.totalHits, memDocs.totalHits);
     }
-    ram.close();
     reader.close();
-    mem.close();
   }

   /**
@@ -69,7 +69,6 @@ public class DuplicateFilterTest extends LuceneTestCase {
   @Override
   public void tearDown() throws Exception {
     reader.close();
-    searcher.close();
     directory.close();
     super.tearDown();
   }
@@ -60,7 +60,6 @@ public class FuzzyLikeThisQueryTest extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     directory.close();
     super.tearDown();
@@ -71,7 +71,6 @@ public class TestSlowCollationMethods extends LuceneTestCase {

   @AfterClass
   public static void afterClass() throws Exception {
-    searcher.close();
     reader.close();
     dir.close();
     collator = null;
@@ -56,7 +56,6 @@ public class TestRegexQuery extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     directory.close();
     super.tearDown();
@@ -80,7 +80,6 @@ public class TestSpanRegexQuery extends LuceneTestCase {
     // true);
     int numHits = searcher.search(sfq, null, 1000).totalHits;
     assertEquals(1, numHits);
-    searcher.close();
     reader.close();
     directory.close();
   }
@@ -260,7 +260,6 @@ public class TestCartesian extends LuceneTestCase {
       assertTrue(geo_distance >= lastDistance);
       lastDistance = geo_distance;
     }
-    searcher.close();
     reader.close();
   }

@@ -332,7 +331,6 @@ public class TestCartesian extends LuceneTestCase {
       assertTrue(geo_distance >= lastDistance);
       lastDistance = geo_distance;
     }
-    searcher.close();
     reader.close();
   }

@@ -404,7 +402,6 @@ public class TestCartesian extends LuceneTestCase {
         lastDistance = geo_distance;
       }
     }
-    searcher.close();
     reader.close();
   }

@@ -476,7 +473,6 @@ public class TestCartesian extends LuceneTestCase {

       }
     }
-    searcher.close();
     reader.close();
   }
 }
@@ -42,8 +42,6 @@ import org.apache.lucene.index.Terms;
 import org.apache.lucene.search.similarities.DefaultSimilarityProvider;
 import org.apache.lucene.search.similarities.SimilarityProvider;
 import org.apache.lucene.store.NIOFSDirectory; // javadoc
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.ReaderUtil;
 import org.apache.lucene.util.TermContext;
 import org.apache.lucene.util.ThreadInterruptedException;

@@ -73,7 +71,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
  * synchronize on the <code>IndexSearcher</code> instance;
  * use your own (non-Lucene) objects instead.</p>
  */
-public class IndexSearcher implements Closeable {
+public class IndexSearcher {
   final IndexReader reader; // package private for testing!

   // NOTE: these members might change in incompatible ways

@@ -198,10 +196,6 @@ public class IndexSearcher implements Closeable {
   public SimilarityProvider getSimilarityProvider() {
     return similarityProvider;
   }

-  @Override
-  public void close() throws IOException {
-  }
-
   /** @lucene.internal */
   protected Query wrapFilter(Query query, Filter filter) {
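With the Closeable interface and the no-op close() both gone, nothing about a searcher's lifetime is tied to the reader it wraps. A minimal fragment of what this permits, not taken from the commit (dir and the searcher names are illustrative):

```java
// Several searchers can wrap one reader; none of them is ever closed.
IndexReader reader = IndexReader.open(dir);
IndexSearcher first = new IndexSearcher(reader);   // e.g. default similarity
IndexSearcher second = new IndexSearcher(reader);  // e.g. a custom SimilarityProvider
// ... use both, even concurrently; IndexSearcher is thread-safe ...
reader.close(); // the single point of resource release
```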
@@ -102,7 +102,6 @@ public abstract class CollationTestBase extends LuceneTestCase {
       (query, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).scoreDocs;
     assertEquals("The index Term should be included.", 1, result.length);

-    searcher.close();
     reader.close();
   }

@@ -131,7 +130,6 @@ public abstract class CollationTestBase extends LuceneTestCase {
     query = new TermRangeQuery("content", secondBeg, secondEnd, true, true);
     hits = searcher.search(query, null, 1000).scoreDocs;
     assertEquals("The index Term should be included.", 1, hits.length);
-    searcher.close();
     reader.close();
   }

@@ -164,7 +162,6 @@ public abstract class CollationTestBase extends LuceneTestCase {
       ("content", secondBeg, secondEnd, true, true);
     result = search.search(csrq, null, 1000).scoreDocs;
     assertEquals("The index Term should be included.", 1, result.length);
-    search.close();
   }

   // Test using various international locales with accented characters (which

@@ -240,7 +237,6 @@ public abstract class CollationTestBase extends LuceneTestCase {

     sort.setSort(new SortField("Denmark", SortField.Type.STRING));
     assertMatches(searcher, queryY, sort, dkResult);
-    searcher.close();
     reader.close();
   }
@@ -472,7 +472,6 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCase {

       IndexSearcher searcher = newSearcher(reader);
       sum += searcher.search(new TermQuery(new Term("body", "united")), 10).totalHits;
-      searcher.close();

       if (VERBOSE) {
         System.out.println("TEST: warm visited " + sum + " fields");
@@ -116,7 +116,6 @@ public class CheckHits {
           query.toString(defaultFieldName),
           correct, actual);
       FieldCache.DEFAULT.purge(s.getIndexReader()); // our wrapping can create insanity otherwise
-      s.close();
     }
   }
@@ -115,13 +115,10 @@ public class QueryUtils {
       IndexSearcher wrapped;
       check(random, q1, wrapped = wrapUnderlyingReader(random, s, -1), false);
       FieldCache.DEFAULT.purge(wrapped.getIndexReader()); // // our wrapping can create insanity otherwise
-      wrapped.close();
       check(random, q1, wrapped = wrapUnderlyingReader(random, s, 0), false);
       FieldCache.DEFAULT.purge(wrapped.getIndexReader()); // // our wrapping can create insanity otherwise
-      wrapped.close();
       check(random, q1, wrapped = wrapUnderlyingReader(random, s, +1), false);
       FieldCache.DEFAULT.purge(wrapped.getIndexReader()); // // our wrapping can create insanity otherwise
-      wrapped.close();
     }
     checkExplanations(q1,s);

@@ -309,7 +306,6 @@ public class QueryUtils {
           Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
         }
         leafPtr++;
-        indexSearcher.close();
       }
       lastReader[0] = context.reader;
       assert readerContextArray[leafPtr].reader == context.reader;

@@ -335,7 +331,6 @@ public class QueryUtils {
           boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
           Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
         }
-        indexSearcher.close();
       }
     }
   }

@@ -394,7 +389,6 @@ public class QueryUtils {
           boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
           Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
         }
-        indexSearcher.close();
         leafPtr++;
       }

@@ -419,7 +413,6 @@ public class QueryUtils {
           boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
           Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
         }
-        indexSearcher.close();
       }
     }
   }
@@ -37,6 +37,7 @@ import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.*;
+import org.apache.lucene.index.IndexReader.ReaderFinishedListener;
 import org.apache.lucene.index.codecs.Codec;
 import org.apache.lucene.index.codecs.PostingsFormat;
 import org.apache.lucene.index.codecs.appending.AppendingCodec;

@@ -731,7 +732,8 @@ public abstract class LuceneTestCase extends Assert {
         rogueThreads.put(t, true);
         rogueCount++;
         if (t.getName().startsWith("LuceneTestCase")) {
-          System.err.println("PLEASE CLOSE YOUR INDEXSEARCHERS IN YOUR TEST!!!!");
+          // TODO: should we fail here now? really test should be failing?
+          System.err.println("PLEASE CLOSE YOUR INDEXREADERS IN YOUR TEST!!!!");
           continue;
         } else {
           // wait on the thread to die of natural causes

@@ -1228,23 +1230,25 @@ public abstract class LuceneTestCase extends Assert {
       final ExecutorService ex = (random.nextBoolean()) ? null
           : Executors.newFixedThreadPool(threads = _TestUtil.nextInt(random, 1, 8),
               new NamedThreadFactory("LuceneTestCase"));
-      if (ex != null && VERBOSE) {
+      if (ex != null) {
+        if (VERBOSE) {
           System.out.println("NOTE: newSearcher using ExecutorService with " + threads + " threads");
         }
+        final IndexReader r0 = r;
+        r.addReaderFinishedListener(new ReaderFinishedListener() {
+          @Override
+          public void finished(IndexReader reader) {
+            // readerFinishedListener bogusly calls us with other random readers
+            // so we must check that its *actually* the one we registered it on.
+            if (reader == r0) {
+              shutdownExecutorService(ex);
+            }
+          }
+        });
       }
-      IndexSearcher ret = random.nextBoolean() ?
-        new AssertingIndexSearcher(random, r, ex) {
-          @Override
-          public void close() throws IOException {
-            super.close();
-            shutdownExecutorService(ex);
-          }
-        } : new AssertingIndexSearcher(random, r.getTopReaderContext(), ex) {
-          @Override
-          public void close() throws IOException {
-            super.close();
-            shutdownExecutorService(ex);
-          }
-        };
+      IndexSearcher ret = random.nextBoolean()
+        ? new AssertingIndexSearcher(random, r, ex)
+        : new AssertingIndexSearcher(random, r.getTopReaderContext(), ex);
       ret.setSimilarityProvider(similarityProvider);
       return ret;
     }
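Because close() no longer exists as a hook, the LuceneTestCase hunk above moves ExecutorService shutdown from anonymous close() overrides into a ReaderFinishedListener on the reader itself. A condensed sketch of that mechanism, assuming the same ReaderFinishedListener API shown in the hunk (the pool size and variable names here are illustrative):

```java
final ExecutorService ex = Executors.newFixedThreadPool(2,
    new NamedThreadFactory("LuceneTestCase"));
final IndexReader r0 = reader;
reader.addReaderFinishedListener(new IndexReader.ReaderFinishedListener() {
  @Override
  public void finished(IndexReader closed) {
    // the listener can fire for unrelated readers; match the one we registered on
    if (closed == r0) {
      ex.shutdown();
    }
  }
});
```

The design point: since a searcher is now a passive wrapper, the reader is the only object with a well-defined end of life, so any cleanup that used to piggyback on searcher.close() has to be re-anchored to the reader.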
@@ -73,7 +73,6 @@ public class TestDemo extends LuceneTestCase {
     phraseQuery.add(new Term("fieldname", "be"));
     assertEquals(1, isearcher.search(phraseQuery, null, 1).totalHits);

-    isearcher.close();
     ireader.close();
     directory.close();
   }
@@ -96,7 +96,6 @@ public class TestExternalCodecs extends LuceneTestCase {
     assertEquals(NUM_DOCS-1, s.search(new TermQuery(new Term("field1", "standard")), 1).totalHits);
     assertEquals(NUM_DOCS-1, s.search(new TermQuery(new Term("field2", "pulsing")), 1).totalHits);
     r.close();
-    s.close();

     if (VERBOSE) {
       System.out.println("\nTEST: now delete 2nd doc");

@@ -114,7 +113,6 @@ public class TestExternalCodecs extends LuceneTestCase {
     assertEquals(0, s.search(new TermQuery(new Term("id", "44")), 1).totalHits);

     r.close();
-    s.close();

     w.close();
@@ -122,7 +122,6 @@ public class TestSearch extends LuceneTestCase {
         out.println(i + " " + hits[i].score + " " + d.get("contents"));
       }
     }
-    searcher.close();
     reader.close();
     directory.close();
   }
@@ -115,8 +115,6 @@ public class TestSearchForDuplicates extends LuceneTestCase {
       printHits(out, hits, searcher);
       checkHits(hits, MAX_DOCS, searcher);

-      searcher.close();
-
       // try a new search with OR
       searcher = new IndexSearcher(reader);
       hits = null;

@@ -130,7 +128,6 @@ public class TestSearchForDuplicates extends LuceneTestCase {
       printHits(out, hits, searcher);
       checkHits(hits, MAX_DOCS, searcher);

-      searcher.close();
       reader.close();
       directory.close();
     }
@@ -174,7 +174,6 @@ public class TestDocument extends LuceneTestCase {

     doAssert(searcher.doc(hits[0].doc), true);
     writer.close();
-    searcher.close();
     reader.close();
     dir.close();
   }

@@ -270,7 +269,6 @@ public class TestDocument extends LuceneTestCase {
       else fail("unexpected id field");
     }
     writer.close();
-    searcher.close();
     reader.close();
     dir.close();
     assertEquals("did not see all IDs", 7, result);
@@ -336,7 +336,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs;
     assertEquals(34, hits.length);

-    searcher.close();
     reader.close();
     dir.close();
   }

@@ -374,7 +373,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     Document d = searcher.getIndexReader().document(hits[0].doc);
     assertEquals("wrong first document", "21", d.get("id"));
     doTestHits(hits, 44, searcher.getIndexReader());
-    searcher.close();
     reader.close();

     // fully merge

@@ -389,7 +387,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     d = searcher.doc(hits[0].doc);
     doTestHits(hits, 44, searcher.getIndexReader());
     assertEquals("wrong first document", "21", d.get("id"));
-    searcher.close();
     reader.close();

     dir.close();

@@ -406,7 +403,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     assertEquals("wrong number of hits", 34, hits.length);
     Document d = searcher.doc(hits[0].doc);
     assertEquals("wrong first document", "21", d.get("id"));
-    searcher.close();
     reader.close();

     // fully merge

@@ -419,7 +415,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
     assertEquals("wrong number of hits", 34, hits.length);
     doTestHits(hits, 34, searcher.getIndexReader());
-    searcher.close();
     reader.close();

     dir.close();

@@ -664,7 +659,6 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       assertTrue("value in id bounds", val >= 0L && val < 35L);
     }

-    searcher.close();
     reader.close();
     dir.close();
     _TestUtil.rmDir(oldIndexDir);
@@ -70,7 +70,6 @@ public class TestBinaryTerms extends LuceneTestCase {
       assertEquals("" + i, is.doc(docs.scoreDocs[0].doc).get("id"));
     }

-    is.close();
     ir.close();
     dir.close();
   }
@@ -393,7 +393,6 @@ public class TestCodecs extends LuceneTestCase {
       return searcher.search(q, null, n).scoreDocs;
     }
     finally {
-      searcher.close();
       reader.close();
     }
   }
@@ -126,7 +126,6 @@ public class TestCrashCausesCorruptIndex extends LuceneTestCase {
     TopDocs topDocs = indexSearcher.search(new TermQuery(new Term(TEXT_FIELD, "fleas")), 10);
     assertNotNull(topDocs);
     assertEquals(expectedTotalHits, topDocs.totalHits);
-    indexSearcher.close();
    indexReader.close();
     realDirectory.close();
   }
@@ -660,7 +660,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
       ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
       assertEquals(16, hits.length);
       reader.close();
-      searcher.close();

       writer = new IndexWriter(dir, newIndexWriterConfig(
           TEST_VERSION_CURRENT, new MockAnalyzer(random))

@@ -685,7 +684,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
       dir.deleteFile(IndexFileNames.SEGMENTS_GEN);
       int expectedCount = 0;

-      searcher.close();
       rwReader.close();

       for(int i=0;i<N+1;i++) {

@@ -697,7 +695,6 @@ public class TestDeletionPolicy extends LuceneTestCase {
         searcher = newSearcher(reader);
         hits = searcher.search(query, null, 1000).scoreDocs;
         assertEquals(expectedCount, hits.length);
-        searcher.close();
         if (expectedCount == 0) {
           expectedCount = 16;
         } else if (expectedCount == 16) {
@@ -75,7 +75,6 @@ public class TestForTooMuchCloning extends LuceneTestCase {
     final int queryCloneCount = dir.getInputCloneCount() - cloneCount;
     //System.out.println("query clone count=" + queryCloneCount);
     assertTrue("too many calls to IndexInput.clone during TermRangeQuery: " + queryCloneCount, queryCloneCount < 50);
-    s.close();
     r.close();
     dir.close();
   }
@@ -515,7 +515,6 @@ public class TestIndexReaderReopen extends LuceneTestCase {
         if (hits.length > 0) {
           searcher.doc(hits[0].doc);
         }
-        searcher.close();
         if (refreshed != r) {
           refreshed.close();
         }
@@ -456,7 +456,6 @@ public class TestIndexWriter extends LuceneTestCase {
       IndexSearcher searcher = new IndexSearcher(reader);
       ScoreDoc[] hits = searcher.search(new TermQuery(new Term("field", "aaa")), null, 1000).scoreDocs;
       assertEquals(300, hits.length);
-      searcher.close();
       reader.close();

       dir.close();

@@ -488,7 +487,6 @@ public class TestIndexWriter extends LuceneTestCase {
     IndexSearcher searcher = new IndexSearcher(reader);
     ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals(10, hits.length);
-    searcher.close();
     reader.close();

     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))

@@ -511,7 +509,6 @@ public class TestIndexWriter extends LuceneTestCase {
     searcher = new IndexSearcher(reader);
     hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals(27, hits.length);
-    searcher.close();
     reader.close();

     reader = IndexReader.open(dir);

@@ -590,7 +587,6 @@ public class TestIndexWriter extends LuceneTestCase {
       IndexSearcher searcher = new IndexSearcher(reader);
       ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("did not get right number of hits", 100, hits.length);
-      searcher.close();
       reader.close();

       writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))

@@ -1013,7 +1009,6 @@ public class TestIndexWriter extends LuceneTestCase {
     assertEquals(0, tps.nextPosition());
     w.close();

-    s.close();
     r.close();
     dir.close();
   }

@@ -1708,7 +1703,6 @@ public class TestIndexWriter extends LuceneTestCase {
         assertEquals("doc " + testID + ", field f" + fieldCount + " is wrong", docExp.get("f"+i), doc.get("f"+i));
       }
     }
-    s.close();
     r.close();
     w.forceMerge(1);
   }
@@ -55,7 +55,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     IndexSearcher searcher = new IndexSearcher(reader);
     ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("first number of hits", 14, hits.length);
-    searcher.close();
     reader.close();

     reader = IndexReader.open(dir);

@@ -69,7 +68,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
       searcher = new IndexSearcher(r);
       hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
-      searcher.close();
       r.close();
       assertTrue("reader should have still been current", reader.isCurrent());
     }

@@ -82,7 +80,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     searcher = new IndexSearcher(r);
     hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("reader did not see changes after writer was closed", 47, hits.length);
-    searcher.close();
     r.close();
     reader.close();
     dir.close();

@@ -109,7 +106,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     IndexSearcher searcher = new IndexSearcher(reader);
     ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("first number of hits", 14, hits.length);
-    searcher.close();
     reader.close();

     writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random))

@@ -124,7 +120,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     searcher = new IndexSearcher(reader);
     hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
-    searcher.close();
     reader.close();

     // Now, close the writer:

@@ -136,7 +131,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     searcher = new IndexSearcher(reader);
     hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("saw changes after writer.abort", 14, hits.length);
-    searcher.close();
     reader.close();

     // Now make sure we can re-open the index, add docs,

@@ -156,7 +150,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
       searcher = new IndexSearcher(r);
       hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
       assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
-      searcher.close();
       r.close();
     }

@@ -165,7 +158,6 @@ public class TestIndexWriterCommit extends LuceneTestCase {
     searcher = new IndexSearcher(r);
     hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("didn't see changes after close", 218, hits.length);
-    searcher.close();
     r.close();

     dir.close();
@@ -403,7 +403,6 @@ public class TestIndexWriterDelete extends LuceneTestCase {
    IndexReader reader = IndexReader.open(dir);
     IndexSearcher searcher = new IndexSearcher(reader);
     int hitCount = searcher.search(new TermQuery(term), null, 1000).totalHits;
-    searcher.close();
     reader.close();
     return hitCount;
   }

@@ -612,7 +611,6 @@ public class TestIndexWriterDelete extends LuceneTestCase {
               + result2 + " instead of expected " + START_COUNT + " or " + END_COUNT);
         }
       }
-      searcher.close();
       newReader.close();
       if (result2 == END_COUNT) {
         break;
@@ -195,7 +195,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
     IndexSearcher searcher = newSearcher(reader);
     ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
     assertEquals("first number of hits", 57, hits.length);
-    searcher.close();
     reader.close();

     // Iterate with larger and larger amounts of free

@@ -395,7 +394,6 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
         }
       }

-      searcher.close();
       reader.close();
       if (VERBOSE) {
         System.out.println("  count is " + result);
@@ -695,7 +695,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
     assertEquals(100, searcher.search(q, 10).totalHits);
-    searcher.close();
     try {
       IndexReader.openIfChanged(r);
       fail("failed to hit AlreadyClosedException");

@@ -761,7 +760,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
       Query q = new TermQuery(new Term("indexname", "test"));
       IndexSearcher searcher = newSearcher(r);
       final int count = searcher.search(q, 10).totalHits;
-      searcher.close();
       assertTrue(count >= lastCount);
       lastCount = count;
     }

@@ -778,7 +776,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
     final int count = searcher.search(q, 10).totalHits;
-    searcher.close();
     assertTrue(count >= lastCount);

     assertEquals(0, excs.size());

@@ -850,7 +847,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
         Query q = new TermQuery(new Term("indexname", "test"));
         IndexSearcher searcher = newSearcher(r);
         sum += searcher.search(q, 10).totalHits;
-        searcher.close();
       }

       for(int i=0;i<numThreads;i++) {

@@ -865,7 +861,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
     Query q = new TermQuery(new Term("indexname", "test"));
     IndexSearcher searcher = newSearcher(r);
     sum += searcher.search(q, 10).totalHits;
-    searcher.close();
     assertTrue("no documents found at all", sum > 0);

     assertEquals(0, excs.size());

@@ -954,7 +949,6 @@ public class TestIndexWriterReader extends LuceneTestCase {
             TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
             assertEquals(20, hits.totalHits);
             didWarm.set(true);
-            s.close();
           }
         }).
         setMergePolicy(newLogMergePolicy(10))
@@ -138,9 +138,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
     // test whether only the minimum amount of seeks()
     // are performed
     performTest(5);
-    searcher.close();
     performTest(10);
-    searcher.close();
   }

   public void testSeek() throws IOException {
@@ -100,7 +100,6 @@ public class TestNRTThreads extends ThreadedIndexingAndSearchingTestCase {
     if (s != fixedSearcher) {
       // Final searcher:
       s.getIndexReader().close();
-      s.close();
     }
   }
@@ -400,8 +400,7 @@ public class TestOmitTf extends LuceneTestCase {
       }
     });
     assertEquals(15, CountingHitCollector.getCount());
-    searcher.close();

     reader.close();
     dir.close();
   }
@@ -46,9 +46,7 @@ public class TestParallelReader extends LuceneTestCase {
   @Override
   public void tearDown() throws Exception {
     single.getIndexReader().close();
-    single.close();
     parallel.getIndexReader().close();
-    parallel.close();
     dir.close();
     dir1.close();
     dir2.close();
@@ -61,7 +61,6 @@ public class TestReaderClosed extends LuceneTestCase {
   public void test() throws Exception {
     TermRangeQuery query = TermRangeQuery.newStringRange("field", "a", "z", true, true);
     searcher.search(query, 5);
-    searcher.close();
     reader.close();
     try {
       searcher.search(query, 5);
@@ -105,7 +105,6 @@ public class TestStressIndexing extends LuceneTestCase {
       for (int i=0; i<100; i++) {
         IndexReader ir = IndexReader.open(directory);
         IndexSearcher is = new IndexSearcher(ir);
-        is.close();
         ir.close();
       }
       count += 100;
@@ -84,7 +84,6 @@ public class TestTermsEnum2 extends LuceneTestCase {
   }

   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     dir.close();
     super.tearDown();
@@ -136,7 +136,6 @@ public class TestTermInfosReaderIndex extends LuceneTestCase {
       TopDocs topDocs = searcher.search(query, 10);
       assertTrue(topDocs.totalHits > 0);
     }
-    searcher.close();
   }

   private List<Term> sample(IndexReader reader, int size) throws IOException {
@@ -194,7 +194,6 @@ public class TestPerFieldPostingsFormat extends LuceneTestCase {
       IndexSearcher searcher = newSearcher(reader);
       TopDocs search = searcher.search(new TermQuery(t), num + 10);
       assertEquals(num, search.totalHits);
-      searcher.close();
       reader.close();
     }
   }
@@ -67,7 +67,6 @@ public class TestAutomatonQuery extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     directory.close();
     super.tearDown();
@@ -86,7 +86,6 @@ public class TestAutomatonQueryUnicode extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     directory.close();
     super.tearDown();
@@ -101,12 +101,10 @@ public class TestBoolean2 extends LuceneTestCase {

   @AfterClass
   public static void afterClass() throws Exception {
-    searcher.close();
     reader.close();
     littleReader.close();
     dir2.close();
     directory.close();
-    bigSearcher.close();
     searcher = null;
     reader = null;
     littleReader = null;
@@ -73,7 +73,6 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {

   @AfterClass
   public static void afterClass() throws Exception {
-    s.close();
     s = null;
     r.close();
     r = null;
@@ -158,7 +158,6 @@ public class TestBooleanOr extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    searcher.close();
     reader.close();
     dir.close();
     super.tearDown();
@@ -140,7 +140,6 @@ public class TestBooleanQuery extends LuceneTestCase {
     dmq.add(pq);
     assertEquals(1, s.search(dmq, 10).totalHits);

-    s.close();
     r.close();
     w.close();
     dir.close();
@@ -62,7 +62,6 @@ public class TestBooleanScorer extends LuceneTestCase
     IndexSearcher indexSearcher = newSearcher(ir);
     ScoreDoc[] hits = indexSearcher.search(query, null, 1000).scoreDocs;
     assertEquals("Number of matched documents", 2, hits.length);
-    indexSearcher.close();
     ir.close();
     directory.close();
   }

@@ -122,7 +121,6 @@ public class TestBooleanScorer extends LuceneTestCase

     assertEquals("should have only 1 hit", 1, hits.size());
     assertEquals("hit should have been docID=3000", 3000, hits.get(0).intValue());
-    searcher.close();
     ir.close();
     directory.close();
   }

@@ -176,7 +174,6 @@ public class TestBooleanScorer extends LuceneTestCase

     assertEquals(1, count[0]);

-    s.close();
     r.close();
     d.close();
   }
@@ -179,7 +179,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     writer.addDocument(doc);

     reader = refreshReader(reader);
-    searcher.close();
     searcher = newSearcher(reader, false);

     TopDocs docs = searcher.search(new MatchAllDocsQuery(), 1);

@@ -205,7 +204,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {

     writer.addDocument(doc);
     reader = refreshReader(reader);
-    searcher.close();
     searcher = newSearcher(reader, false);

     docs = searcher.search(new MatchAllDocsQuery(), filter, 1);

@@ -227,7 +225,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     writer.deleteDocuments(new Term("id", "1"));

     reader = refreshReader(reader);
-    searcher.close();
     searcher = newSearcher(reader, false);

     docs = searcher.search(new MatchAllDocsQuery(), filter, 1);

@@ -245,7 +242,6 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
     assertTrue(oldReader != null);
     assertTrue(oldReader2 != null);

-    searcher.close();
     reader.close();
     writer.close();
     dir.close();
@@ -125,7 +125,6 @@ public class TestConstantScoreQuery extends LuceneTestCase {
       checkHits(searcher, bq, csq1.getBoost() + csq2.getBoost(), bucketScorerClass, null);
       checkHits(searcher, csqbq, csqbq.getBoost(), ConstantScoreQuery.ConstantScorer.class.getName(), bucketScorerClass);
     } finally {
-      if (searcher != null) searcher.close();
       if (reader != null) reader.close();
       if (directory != null) directory.close();
     }
@@ -97,7 +97,6 @@ public class TestDateFilter extends LuceneTestCase {

     result = searcher.search(query2, df2, 1000).scoreDocs;
     assertEquals(0, result.length);
-    searcher.close();
     reader.close();
     indexStore.close();
   }

@@ -164,7 +163,6 @@ public class TestDateFilter extends LuceneTestCase {

     result = searcher.search(query2, df2, 1000).scoreDocs;
     assertEquals(0, result.length);
-    searcher.close();
     reader.close();
     indexStore.close();
   }
@@ -88,7 +88,6 @@ public class TestDateSort extends LuceneTestCase {
       String text = document.get(TEXT_FIELD);
       actualOrder[i] = text;
     }
-    searcher.close();

     // Set up the expected order (i.e. Document 5, 4, 3, 2, 1).
     String[] expectedOrder = new String[5];
@@ -163,7 +163,6 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {

   @Override
   public void tearDown() throws Exception {
-    s.close();
     r.close();
     index.close();
     super.tearDown();
@@ -121,7 +121,6 @@ public class TestDocIdSet extends LuceneTestCase {
     };

     Assert.assertEquals(0, searcher.search(new MatchAllDocsQuery(), f, 10).totalHits);
-    searcher.close();
     reader.close();
     dir.close();
   }
@@ -133,9 +133,6 @@ public class TestDocValuesScoring extends LuceneTestCase {

     assertEquals(boost.scoreDocs[0].score, noboost.scoreDocs[0].score, SCORE_EPSILON);

-
-    searcher1.close();
-    searcher2.close();
     ir.close();
     dir.close();
   }
@@ -61,7 +61,6 @@ public class TestElevationComparator extends LuceneTestCase {
     runTest(searcher, true);
     runTest(searcher, false);

-    searcher.close();
     r.close();
     directory.close();
   }
@@ -61,7 +61,6 @@ public class TestExplanations extends LuceneTestCase {

   @AfterClass
   public static void afterClassTestExplanations() throws Exception {
-    searcher.close();
     searcher = null;
     reader.close();
     reader = null;
@ -122,7 +122,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
|
||||
result = search.search(q,FieldCacheRangeFilter.newStringRange("id",medIP,medIP,T,T), numDocs).scoreDocs;
|
||||
assertEquals("med,med,T,T", 1, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -185,7 +184,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("max,max,T,T", 1, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newStringRange("rand",maxRP,null,T,F), numDocs).scoreDocs;
|
||||
assertEquals("max,nul,T,T", 1, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
// byte-ranges cannot be tested, because all ranges are too big for bytes, need an extra range for that
|
||||
|
@ -278,7 +276,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("overflow special case", 0, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newShortRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
|
||||
assertEquals("inverse range", 0, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -370,7 +367,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("overflow special case", 0, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newIntRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
|
||||
assertEquals("inverse range", 0, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -462,7 +458,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("overflow special case", 0, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newLongRange("id",maxIdO,minIdO,T,T), numDocs).scoreDocs;
|
||||
assertEquals("inverse range", 0, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
// float and double tests are a bit minimalistic, but its complicated, because missing precision
|
||||
|
@ -494,7 +489,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("infinity special case", 0, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newFloatRange("id",null,Float.valueOf(Float.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
|
||||
assertEquals("infinity special case", 0, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -524,7 +518,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
assertEquals("infinity special case", 0, result.length);
|
||||
result = search.search(q,FieldCacheRangeFilter.newDoubleRange("id",null, Double.valueOf(Double.NEGATIVE_INFINITY),F,F), numDocs).scoreDocs;
|
||||
assertEquals("infinity special case", 0, result.length);
|
||||
search.close();
|
||||
}
|
||||
|
||||
// test using a sparse index (with deleted docs).
|
||||
|
@ -565,7 +558,6 @@ public class TestFieldCacheRangeFilter extends BaseTestRangeFilter {
|
|||
|
||||
result = search.search(q,FieldCacheRangeFilter.newByteRange("id",Byte.valueOf((byte) -20),Byte.valueOf((byte) -10),T,T), 100).scoreDocs;
|
||||
assertEquals("find all", 11, result.length);
|
||||
search.close();
|
||||
reader.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -68,7 +68,6 @@ public class TestFieldCacheTermsFilter extends LuceneTestCase {
|
|||
results = searcher.search(q, new FieldCacheTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs;
|
||||
assertEquals("Must match 2", 2, results.length);
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
rd.close();
|
||||
}
|
||||
|
|
|
@ -58,7 +58,6 @@ public class TestFieldValueFilter extends LuceneTestCase {
|
|||
}
|
||||
|
||||
reader.close();
|
||||
searcher.close();
|
||||
directory.close();
|
||||
}
|
||||
|
||||
|
@ -86,7 +85,6 @@ public class TestFieldValueFilter extends LuceneTestCase {
|
|||
}
|
||||
|
||||
reader.close();
|
||||
searcher.close();
|
||||
directory.close();
|
||||
}
|
||||
|
||||
|
|
|
@ -104,7 +104,6 @@ public class TestFilteredQuery extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -82,7 +82,6 @@ public class TestFilteredSearch extends LuceneTestCase {
|
|||
IndexSearcher indexSearcher = new IndexSearcher(reader);
|
||||
ScoreDoc[] hits = indexSearcher.search(booleanQuery, filter, 1000).scoreDocs;
|
||||
assertEquals("Number of matched documents", 1, hits.length);
|
||||
indexSearcher.close();
|
||||
reader.close();
|
||||
}
|
||||
catch (IOException e) {
|
||||
|
|
|
@ -185,7 +185,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals(0, hits.length);
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -275,7 +274,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
// expecting exception
|
||||
}
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -312,7 +310,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
FuzzyQuery fq = new FuzzyQuery(new Term("field", "z123456"), 1f, 0, 2);
|
||||
TopDocs docs = searcher.search(fq, 2);
|
||||
assertEquals(5, docs.totalHits); // 5 docs, from the a and b's
|
||||
searcher.close();
|
||||
mr.close();
|
||||
ir1.close();
|
||||
ir2.close();
|
||||
|
@ -353,7 +350,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals(0, hits.length);
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -378,7 +374,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
assertEquals("Lucene", reader.document(hits[0].doc).get("field"));
|
||||
assertEquals("Lucene", reader.document(hits[1].doc).get("field"));
|
||||
assertEquals("Lucenne", reader.document(hits[2].doc).get("field"));
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -416,7 +411,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
ScoreDoc[] hits = searcher.search(q, 10).scoreDocs;
|
||||
assertEquals(1, hits.length);
|
||||
assertEquals("Giga byte", searcher.doc(hits[0].doc).get("field"));
|
||||
searcher.close();
|
||||
r.close();
|
||||
index.close();
|
||||
}
|
||||
|
@ -457,7 +451,6 @@ public class TestFuzzyQuery extends LuceneTestCase {
|
|||
assertEquals("test", searcher.doc(hits[0].doc).get("field"));
|
||||
assertEquals("foobar", searcher.doc(hits[1].doc).get("field"));
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
index.close();
|
||||
}
|
||||
|
|
|
@ -139,7 +139,6 @@ public class TestFuzzyQuery2 extends LuceneTestCase {
|
|||
assertEquals(Float.parseFloat(scoreDoc[1]), docs.scoreDocs[i].score, epsilon);
|
||||
}
|
||||
}
|
||||
searcher.close();
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -70,7 +70,6 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
|
|||
assertEquals(1, hits.length);
|
||||
|
||||
iw.deleteDocuments(new Term("key", "one"));
|
||||
is.close();
|
||||
ir.close();
|
||||
ir = IndexReader.open(iw, true);
|
||||
is = newSearcher(ir);
|
||||
|
@ -79,7 +78,6 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
|
|||
assertEquals(2, hits.length);
|
||||
|
||||
iw.close();
|
||||
is.close();
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -142,7 +142,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
}
|
||||
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -162,7 +161,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
q.add(new Term("body", "chocolate"));
|
||||
q.add(new Term[] {new Term("body", "pie"), new Term("body", "tart")});
|
||||
assertEquals(2, searcher.search(q, 1).totalHits);
|
||||
searcher.close();
|
||||
r.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -205,7 +203,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
searcher.explain(q, 0);
|
||||
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -233,7 +230,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
|
||||
assertEquals("Wrong number of hits", 0, hits.length);
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -256,7 +252,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
searcher.explain(q, 0);
|
||||
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -330,7 +325,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
|
|||
assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f);
|
||||
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
|
|
@ -161,7 +161,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
|
|||
result[i].score, SCORE_COMP_THRESH);
|
||||
}
|
||||
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -247,7 +246,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
|
|||
Assert.assertEquals(0, hits[0].doc);
|
||||
Assert.assertEquals(1, hits[1].doc);
|
||||
assertTrue(hits[0].score > hits[1].score);
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -278,8 +276,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
|
|||
assertEquals("mismatch in docid for hit#" + i, expected[i].doc,
|
||||
actual[i].doc);
|
||||
}
|
||||
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -412,8 +408,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
|
|||
|
||||
result = search.search(csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT), null, numDocs).scoreDocs;
|
||||
assertEquals("med,med,T,T", 1, result.length);
|
||||
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -476,7 +470,5 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
|
|||
assertEquals("max,max,T,T", 1, result.length);
|
||||
result = search.search(csrq("rand", maxRP, null, T, F), null, numDocs).scoreDocs;
|
||||
assertEquals("max,nul,T,T", 1, result.length);
|
||||
|
||||
search.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -73,7 +73,6 @@ public class TestMultiValuedNumericRangeQuery extends LuceneTestCase {
|
|||
TopDocs nrTopDocs = searcher.search(tq, 1);
|
||||
assertEquals("Returned count for NumericRangeQuery and TermRangeQuery must be equal", trTopDocs.totalHits, nrTopDocs.totalHits );
|
||||
}
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
|
|
@ -51,7 +51,6 @@ public class TestNot extends LuceneTestCase {
|
|||
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals(0, hits.length);
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
store.close();
|
||||
}
|
||||
|
|
|
@ -96,7 +96,6 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
|
|||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
searcher.close();
|
||||
searcher = null;
|
||||
reader.close();
|
||||
reader = null;
|
||||
|
@ -335,7 +334,6 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
|
|||
topDocs = s.search(q, 10);
|
||||
assertEquals("Score doc count", TestNumericUtils.FLOAT_NANs.length, topDocs.scoreDocs.length );
|
||||
|
||||
s.close();
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -99,7 +99,6 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
|
|||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
searcher.close();
|
||||
searcher = null;
|
||||
reader.close();
|
||||
reader = null;
|
||||
|
@ -354,7 +353,6 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
|
|||
topDocs = s.search(q, 10);
|
||||
assertEquals("Score doc count", TestNumericUtils.DOUBLE_NANs.length, topDocs.scoreDocs.length );
|
||||
|
||||
s.close();
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -93,7 +93,6 @@ public class TestPhrasePrefixQuery extends LuceneTestCase {
|
|||
|
||||
result = searcher.search(query2, null, 1000).scoreDocs;
|
||||
assertEquals(0, result.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
|
|
@ -98,7 +98,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
searcher.close();
|
||||
searcher = null;
|
||||
reader.close();
|
||||
reader = null;
|
||||
|
@ -249,7 +248,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
QueryUtils.check(random, query,searcher);
|
||||
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -289,7 +287,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
QueryUtils.check(random, termQuery,searcher);
|
||||
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
|
||||
writer = new RandomIndexWriter(random, directory,
|
||||
|
@ -336,7 +333,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
QueryUtils.check(random, booleanQuery,searcher);
|
||||
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -380,7 +376,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
assertEquals(0.31, hits[2].score, 0.01);
|
||||
assertEquals(2, hits[2].doc);
|
||||
QueryUtils.check(random, query,searcher);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
@ -695,7 +690,6 @@ public class TestPhraseQuery extends LuceneTestCase {
|
|||
}
|
||||
|
||||
reader.close();
|
||||
s.close();
|
||||
dir.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -196,7 +196,6 @@ public class TestPositionIncrement extends LuceneTestCase {
|
|||
hits = searcher.search(q, null, 1000).scoreDocs;
|
||||
assertEquals(0, hits.length);
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
store.close();
|
||||
}
|
||||
|
|
|
@ -89,7 +89,6 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
|
|||
for (int i = 0; i < sd.length; i++) {
|
||||
assertTrue("only positive scores should return: " + sd[i].score, sd[i].score > 0);
|
||||
}
|
||||
searcher.close();
|
||||
ir.close();
|
||||
directory.close();
|
||||
}
|
||||
|
|
|
@ -101,7 +101,6 @@ public class TestPrefixFilter extends LuceneTestCase {
|
|||
assertEquals(0, hits.length);
|
||||
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
|
|
@ -76,7 +76,6 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
|
|||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
searcher.close();
|
||||
searcher = null;
|
||||
reader.close();
|
||||
reader = null;
|
||||
|
|
|
@ -61,7 +61,6 @@ public class TestPrefixQuery extends LuceneTestCase {
|
|||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("everything", 3, hits.length);
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
}
|
||||
|
|
|
@ -75,7 +75,6 @@ public class TestPrefixRandom extends LuceneTestCase {
|
|||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
reader.close();
|
||||
searcher.close();
|
||||
dir.close();
|
||||
super.tearDown();
|
||||
}
|
||||
|
|
|
@ -81,7 +81,6 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
|
|||
assertEquals(0, hits.totalHits);
|
||||
hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10);
|
||||
assertEquals(0, hits.totalHits);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
dir.close();
|
||||
}
|
||||
|
@ -148,7 +147,6 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
|
|||
assertEquals(1, td.totalHits);
|
||||
}
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -59,7 +59,6 @@ public class TestRegexpQuery extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -95,7 +95,6 @@ public class TestRegexpRandom extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
dir.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -95,8 +95,6 @@ public class TestRegexpRandom2 extends LuceneTestCase {
|
|||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
reader.close();
|
||||
searcher1.close();
|
||||
searcher2.close();
|
||||
dir.close();
|
||||
super.tearDown();
|
||||
}
|
||||
|
|
|
@ -119,7 +119,6 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
|
|||
for (int i = 0; i < scores.length; i++) {
|
||||
assertEquals(scores[i], scc.mscores[i], 0f);
|
||||
}
|
||||
searcher.close();
|
||||
ir.close();
|
||||
directory.close();
|
||||
}
|
||||
|
|
|
@ -323,7 +323,6 @@ public class TestScorerPerf extends LuceneTestCase {
|
|||
sets=randBitSets(atLeast(1000), atLeast(10));
|
||||
doConjunctions(atLeast(10000), atLeast(5));
|
||||
doNestedConjunctions(atLeast(10000), atLeast(3), atLeast(3));
|
||||
s.close();
|
||||
r.close();
|
||||
d.close();
|
||||
}
|
||||
|
|
|
@ -54,7 +54,6 @@ public class TestSearchAfter extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
dir.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -108,7 +108,6 @@ public class TestSearchWithThreads extends LuceneTestCase {
|
|||
|
||||
if (VERBOSE) System.out.println(NUM_SEARCH_THREADS + " threads did " + netSearch.get() + " searches");
|
||||
|
||||
s.close();
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -169,7 +169,6 @@ public class TestSimilarity extends LuceneTestCase {
|
|||
}
|
||||
});
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
store.close();
|
||||
}
|
||||
|
|
|
@ -67,7 +67,6 @@ public class TestSimilarityProvider extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -149,7 +149,6 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
|
|||
|
||||
//QueryUtils.check(query,searcher);
|
||||
writer.close();
|
||||
searcher.close();
|
||||
reader.close();
|
||||
ramDir.close();
|
||||
|
||||
|
@ -262,7 +261,6 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
|
|||
assertEquals(3, is.search(pq, 4).totalHits);
|
||||
pq.setSlop(2);
|
||||
assertEquals(4, is.search(pq, 4).totalHits);
|
||||
is.close();
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
@ -286,7 +284,6 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
|
|||
pq.add(new Term("lyrics", "drug"), 3);
|
||||
pq.setSlop(1);
|
||||
assertSaneScoring(pq, is);
|
||||
is.close();
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
@ -341,7 +338,6 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
|
|||
pq.add(new Term("lyrics", "drug"), 3);
|
||||
pq.setSlop(5);
|
||||
assertSaneScoring(pq, is);
|
||||
is.close();
|
||||
ir.close();
|
||||
dir.close();
|
||||
}
|
||||
|
|
|
@ -334,9 +334,6 @@ public class TestSort extends LuceneTestCase {
|
|||
full.reader.close();
|
||||
searchX.reader.close();
|
||||
searchY.reader.close();
|
||||
full.close();
|
||||
searchX.close();
|
||||
searchY.close();
|
||||
for (Directory dir : dirs)
|
||||
dir.close();
|
||||
super.tearDown();
|
||||
|
@ -559,7 +556,6 @@ public class TestSort extends LuceneTestCase {
|
|||
}
|
||||
assertFalse("Found sort results out of order", fail);
|
||||
searcher.getIndexReader().close();
|
||||
searcher.close();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -816,7 +812,6 @@ public class TestSort extends LuceneTestCase {
|
|||
new SortField ("string", SortField.Type.STRING),
|
||||
new SortField ("float", SortField.Type.FLOAT, true) );
|
||||
assertMatches (parallelSearcher, queryG, sort, "ZYXW");
|
||||
parallelSearcher.close();
|
||||
exec.shutdown();
|
||||
exec.awaitTermination(1000, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
@ -855,7 +850,6 @@ public class TestSort extends LuceneTestCase {
|
|||
new IndexReader[] {searchX.getIndexReader(),
|
||||
searchY.getIndexReader()}), exec);
|
||||
runMultiSorts(searcher, false);
|
||||
searcher.close();
|
||||
exec.shutdown();
|
||||
exec.awaitTermination(1000, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
@ -1284,7 +1278,6 @@ public class TestSort extends LuceneTestCase {
|
|||
// null sorts first
|
||||
assertEquals(1, hits.scoreDocs[0].doc);
|
||||
assertEquals(0, hits.scoreDocs[1].doc);
|
||||
s.close();
|
||||
r.close();
|
||||
dir.close();
|
||||
}
|
||||
|
@ -1308,7 +1301,6 @@ public class TestSort extends LuceneTestCase {
|
|||
IndexReader reader = IndexReader.open(indexStore);
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
searcher.search(new MatchAllDocsQuery(), null, 500, sort);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
@ -1329,7 +1321,6 @@ public class TestSort extends LuceneTestCase {
|
|||
TotalHitCountCollector c = new TotalHitCountCollector();
|
||||
searcher.search(new MatchAllDocsQuery(), null, c);
|
||||
assertEquals(5, c.getTotalHits());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
indexStore.close();
|
||||
}
|
||||
|
|
|
@ -61,7 +61,6 @@ public class TestSubScorerFreqs extends LuceneTestCase {
|
|||
@AfterClass
|
||||
public static void finish() throws Exception {
|
||||
s.getIndexReader().close();
|
||||
s.close();
|
||||
s = null;
|
||||
dir.close();
|
||||
dir = null;
|
||||
|
|
|
@ -134,8 +134,6 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
|
|||
result = search.search(q, TermRangeFilter.newStringRange("id", medIP, medIP, T, T),
|
||||
numDocs).scoreDocs;
|
||||
assertEquals("med,med,T,T", 1, result.length);
|
||||
|
||||
search.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -212,7 +210,5 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
|
|||
result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, null, T, F),
|
||||
numDocs).scoreDocs;
|
||||
assertEquals("max,nul,T,T", 1, result.length);
|
||||
|
||||
search.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,7 +59,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("A,B,C,D, only B in range", 1, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
|
||||
initializeIndex(new String[] {"A", "B", "D"});
|
||||
|
@ -67,7 +66,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
searcher = new IndexSearcher(reader);
|
||||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("A,B,D, only B in range", 1, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
|
||||
addDoc("C");
|
||||
|
@ -75,7 +73,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
searcher = new IndexSearcher(reader);
|
||||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("C added, still only B in range", 1, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
|
@ -87,7 +84,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("A,B,C,D - A,B,C in range", 3, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
|
||||
initializeIndex(new String[]{"A", "B", "D"});
|
||||
|
@ -95,7 +91,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
searcher = new IndexSearcher(reader);
|
||||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("A,B,D - A and B in range", 2, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
|
||||
addDoc("C");
|
||||
|
@ -103,7 +98,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
searcher = new IndexSearcher(reader);
|
||||
hits = searcher.search(query, null, 1000).scoreDocs;
|
||||
assertEquals("C added - A, B, C in range", 3, hits.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
|
@ -125,7 +119,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
query = TermRangeQuery.newStringRange("content", "B", null, true, false);
|
||||
assertTrue(query.getTermsEnum(terms) instanceof TermRangeTermsEnum);
|
||||
assertEquals(3, searcher.search(query, null, 1000).scoreDocs.length);
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
|
@ -146,7 +139,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
} finally {
|
||||
BooleanQuery.setMaxClauseCount(savedClauseCount);
|
||||
}
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
|
@ -292,7 +284,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
// until Lucene-38 is fixed, use this assert:
|
||||
//assertEquals("A,B,<empty string>,C,D => A, B & <empty string> are in range", 2, hits.length());
|
||||
|
||||
searcher.close();
|
||||
reader.close();
|
||||
initializeIndex(new String[] {"A", "B", "", "D"}, analyzer);
|
||||
reader = IndexReader.open(dir);
|
||||
|
@ -302,7 +293,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
assertEquals("A,B,<empty string>,D => A, B & <empty string> are in range", 3, numHits);
|
||||
// until Lucene-38 is fixed, use this assert:
|
||||
//assertEquals("A,B,<empty string>,D => A, B & <empty string> are in range", 2, hits.length());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
addDoc("C");
|
||||
reader = IndexReader.open(dir);
|
||||
|
@ -312,7 +302,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
assertEquals("C added, still A, B & <empty string> are in range", 3, numHits);
|
||||
// until Lucene-38 is fixed, use this assert
|
||||
//assertEquals("C added, still A, B & <empty string> are in range", 2, hits.length());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
|
||||
|
@ -329,7 +318,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
assertEquals("A,B,<empty string>,C,D => A,B,<empty string>,C in range", 4, numHits);
|
||||
// until Lucene-38 is fixed, use this assert
|
||||
//assertEquals("A,B,<empty string>,C,D => A,B,<empty string>,C in range", 3, hits.length());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
initializeIndex(new String[]{"A", "B", "", "D"}, analyzer);
|
||||
reader = IndexReader.open(dir);
|
||||
|
@ -339,7 +327,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
assertEquals("A,B,<empty string>,D - A, B and <empty string> in range", 3, numHits);
|
||||
// until Lucene-38 is fixed, use this assert
|
||||
//assertEquals("A,B,<empty string>,D => A, B and <empty string> in range", 2, hits.length());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
addDoc("C");
|
||||
reader = IndexReader.open(dir);
|
||||
|
@ -349,7 +336,6 @@ public class TestTermRangeQuery extends LuceneTestCase {
|
|||
assertEquals("C added => A,B,<empty string>,C in range", 4, numHits);
|
||||
// until Lucene-38 is fixed, use this assert
|
||||
//assertEquals("C added => A,B,<empty string>,C in range", 3, hits.length());
|
||||
searcher.close();
|
||||
reader.close();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,6 @@ public class TestTermScorer extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
indexSearcher.close();
|
||||
indexReader.close();
|
||||
directory.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -79,7 +79,6 @@ public class TestTermVectors extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
super.tearDown();
|
||||
|
|
|
@ -107,7 +107,6 @@ public class TestTimeLimitingCollector extends LuceneTestCase {
|
|||
|
||||
@Override
|
||||
public void tearDown() throws Exception {
|
||||
searcher.close();
|
||||
reader.close();
|
||||
directory.close();
|
||||
counterThread.stopTimer();
|
||||
|
|
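
Every hunk above follows the same mechanical pattern: the `searcher.close()` call (variously named `search`, `s`, `is`, `indexSearcher`, `parallelSearcher`) is deleted, while the underlying `IndexReader` and `Directory` continue to be closed by whoever opened them. A minimal sketch of the resulting idiom, assuming the trunk API at the time of this commit (`IndexReader.open(Directory)` and the `IndexSearcher(IndexReader)` constructor, both visible in the hunks above); the `countAllDocs` helper and its enclosing class are hypothetical, not code from this diff:

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;

public class SearcherLifecycleSketch {
  /** Hypothetical helper: count matching docs with the post-LUCENE-3640 lifecycle. */
  static int countAllDocs(Directory dir) throws Exception {
    IndexReader reader = IndexReader.open(dir);
    try {
      // The searcher merely wraps the reader and owns no resources,
      // so there is no longer any close() to call on it.
      IndexSearcher searcher = new IndexSearcher(reader);
      TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
      return docs.totalHits;
    } finally {
      // The reader (and, in the caller, the directory) still must be closed.
      reader.close();
    }
  }
}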