LUCENE-6286: Removed IndexSearcher methods that take a Filter object.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1662059 13f79535-47bb-0310-9956-ffa450edef68
Adrien Grand 2015-02-24 17:43:10 +00:00
parent 612e4544ef
commit 2cd3fb807d
124 changed files with 759 additions and 864 deletions


@ -156,6 +156,9 @@ API Changes
performance would be achieved through CollationKeyAnalyzer or
ICUCollationKeyAnalyzer. (Adrien Grand)
* LUCENE-6286: Removed IndexSearcher methods that take a Filter object.
A BooleanQuery with a filter clause must be used instead. (Adrien Grand)
Other
* LUCENE-6248: Remove unused odd constants from StandardSyntaxParser.jj
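The LUCENE-6286 entry above is the substance of this commit: the Filter-taking IndexSearcher methods are gone, and the filtering constraint moves into the query itself. Below is a minimal migration sketch, not taken from the commit; the field names, variable names, and helper method are illustrative. It shows both the BooleanQuery filter clause that the CHANGES entry recommends and the FilteredQuery wrapping applied throughout the test changes that follow.

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

static TopDocs searchWithConstraint(IndexSearcher searcher) throws IOException {
  Query query = new TermQuery(new Term("body", "lucene"));        // scoring part
  Query constraint = new TermQuery(new Term("type", "article"));  // non-scoring restriction

  // Removed by this commit:
  //   searcher.search(query, new QueryWrapperFilter(constraint), 10);

  // Replacement named in the CHANGES entry: a BooleanQuery with a filter clause.
  BooleanQuery bq = new BooleanQuery();
  bq.add(query, Occur.MUST);
  bq.add(constraint, Occur.FILTER);
  TopDocs hits = searcher.search(bq, 10);

  // Equivalent wrapping used by the updated tests in this commit:
  TopDocs sameHits = searcher.search(
      new FilteredQuery(query, new QueryWrapperFilter(constraint)), 10);
  assert hits.totalHits == sameHits.totalHits;

  return hits;
}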


@ -114,7 +114,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
ts.end();
}
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
int[] ranks = new int[] { 0 };
compareRanks(hits, ranks);
}
@ -139,7 +139,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
ts.end();
}
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
int[] ranks = new int[] { 1, 2, 0 };
compareRanks(hits, ranks);
}


@ -729,7 +729,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
}
}
ScoreDoc[] hits = searcher.search(new TermQuery(new Term(new String("content"), "aaa")), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(new Term(new String("content"), "aaa")), 1000).scoreDocs;
// First document should be #0
StoredDocument d = searcher.getIndexReader().document(hits[0].doc);
@ -738,20 +738,20 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
doTestHits(hits, 34, searcher.getIndexReader());
if (is40Index) {
hits = searcher.search(new TermQuery(new Term(new String("content5"), "aaa")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term(new String("content5"), "aaa")), 1000).scoreDocs;
doTestHits(hits, 34, searcher.getIndexReader());
hits = searcher.search(new TermQuery(new Term(new String("content6"), "aaa")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term(new String("content6"), "aaa")), 1000).scoreDocs;
doTestHits(hits, 34, searcher.getIndexReader());
}
hits = searcher.search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term("utf8", "\u0000")), 1000).scoreDocs;
assertEquals(34, hits.length);
hits = searcher.search(new TermQuery(new Term(new String("utf8"), "lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term(new String("utf8"), "lu\uD834\uDD1Ece\uD834\uDD60ne")), 1000).scoreDocs;
assertEquals(34, hits.length);
hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), 1000).scoreDocs;
assertEquals(34, hits.length);
reader.close();
@ -775,7 +775,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// make sure searching sees right # hits
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
StoredDocument d = searcher.getIndexReader().document(hits[0].doc);
assertEquals("wrong first document", "0", d.get("id"));
doTestHits(hits, 44, searcher.getIndexReader());
@ -790,7 +790,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
assertEquals("wrong number of hits", 44, hits.length);
d = searcher.doc(hits[0].doc);
doTestHits(hits, 44, searcher.getIndexReader());
@ -802,7 +802,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
// make sure searching sees right # hits
DirectoryReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
assertEquals("wrong number of hits", 34, hits.length);
StoredDocument d = searcher.doc(hits[0].doc);
assertEquals("wrong first document", "0", d.get("id"));
@ -816,7 +816,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
assertEquals("wrong number of hits", 34, hits.length);
doTestHits(hits, 34, searcher.getIndexReader());
reader.close();


@ -124,14 +124,14 @@ public abstract class ReadTask extends PerfTask {
TopFieldCollector collector = TopFieldCollector.create(sort, numHits,
true, withScore(),
withMaxScore());
searcher.search(q, null, collector);
searcher.search(q, collector);
hits = collector.topDocs();
} else {
hits = searcher.search(q, numHits);
}
} else {
Collector collector = createCollector();
searcher.search(q, null, collector);
searcher.search(q, collector);
//hits = collector.topDocs();
}
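ReadTask now feeds both collector paths through the two-argument search(Query, Collector) entry point. A small sketch of that lower-level call, assuming a searcher and query already in scope (imports as in the sketch after the CHANGES hunk above); the collector creation mirrors the TopScoreDocCollector.create call seen elsewhere in this commit.

// searcher and query assumed in scope; collect the top 10 hits by score
TopScoreDocCollector collector = TopScoreDocCollector.create(10);
searcher.search(query, collector);   // the former filter argument is simply gone
TopDocs hits = collector.topDocs();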


@ -94,7 +94,7 @@ public class QualityBenchmark {
Query q = qqParser.parse(qq);
// search with this query
long t1 = System.currentTimeMillis();
TopDocs td = searcher.search(q,null,maxResults);
TopDocs td = searcher.search(q,maxResults);
long searchTime = System.currentTimeMillis()-t1;
//most likely we either submit or judge, but check both
if (judge!=null) {


@ -46,7 +46,7 @@ public class FieldDoc extends ScoreDoc {
* the <code>value</code> method corresponding
* FieldComparator used to sort this field.
* @see Sort
* @see IndexSearcher#search(Query,Filter,int,Sort)
* @see IndexSearcher#search(Query,int,Sort)
*/
public Object[] fields;


@ -27,7 +27,7 @@ import org.apache.lucene.util.PriorityQueue;
*
* @lucene.experimental
* @since 2.9
* @see IndexSearcher#search(Query,Filter,int,Sort)
* @see IndexSearcher#search(Query,int,Sort)
*/
public abstract class FieldValueHitQueue<T extends FieldValueHitQueue.Entry> extends PriorityQueue<T> {
@ -202,7 +202,7 @@ public abstract class FieldValueHitQueue<T extends FieldValueHitQueue.Entry> ext
*
* @param entry The Entry used to create a FieldDoc
* @return The newly created FieldDoc
* @see IndexSearcher#search(Query,Filter,int,Sort)
* @see IndexSearcher#search(Query,int,Sort)
*/
FieldDoc fillFields(final Entry entry) {
final int n = comparators.length;


@ -47,8 +47,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
/** Implements search over a single IndexReader.
*
* <p>Applications usually need only call the inherited
* {@link #search(Query,int)}
* or {@link #search(Query,Filter,int)} methods. For
* {@link #search(Query,int)} method. For
* performance reasons, if your index is unchanging, you
* should share a single IndexSearcher instance across
* multiple searches instead of creating a new one
@ -209,11 +208,6 @@ public class IndexSearcher {
public Similarity getSimilarity() {
return similarity;
}
/** @lucene.internal */
protected Query wrapFilter(Query query, Filter filter) {
return (filter == null) ? query : new FilteredQuery(query, filter);
}
/** Finds the top <code>n</code>
* hits for <code>query</code> where all results are after a previous
@ -275,21 +269,6 @@ public class IndexSearcher {
}
}
/** Finds the top <code>n</code>
* hits for <code>query</code>, applying <code>filter</code> if non-null,
* where all results are after a previous result (<code>after</code>).
* <p>
* By passing the bottom result from a previous page as <code>after</code>,
* this method can be used for efficient 'deep-paging' across potentially
* large result sets.
*
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException {
return searchAfter(after, wrapFilter(query, filter), n);
}
/** Finds the top <code>n</code>
* hits for <code>query</code>.
*
@ -301,34 +280,6 @@ public class IndexSearcher {
return searchAfter(null, query, n);
}
/** Finds the top <code>n</code>
* hits for <code>query</code>, applying <code>filter</code> if non-null.
*
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs search(Query query, Filter filter, int n)
throws IOException {
return search(wrapFilter(query, filter), n);
}
/** Lower-level search API.
*
* <p>{@link LeafCollector#collect(int)} is called for every matching
* document.
*
* @param query to match documents
* @param filter if non-null, used to permit documents to be collected.
* @param results to receive hits
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public void search(Query query, Filter filter, Collector results)
throws IOException {
search(wrapFilter(query, filter), results);
}
/** Lower-level search API.
*
* <p>{@link LeafCollector#collect(int)} is called for every matching document.
@ -340,30 +291,13 @@ public class IndexSearcher {
throws IOException {
search(leafContexts, createNormalizedWeight(query, results.needsScores()), results);
}
/** Search implementation with arbitrary sorting. Finds
* the top <code>n</code> hits for <code>query</code>, applying
* <code>filter</code> if non-null, and sorting the hits by the criteria in
* <code>sort</code>.
*
* <p>NOTE: this does not compute scores by default; use
* {@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)} to
* control scoring.
*
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopFieldDocs search(Query query, Filter filter, int n,
Sort sort) throws IOException {
return search(query, filter, n, sort, false, false);
}
/** Search implementation with arbitrary sorting, plus
* control over whether hit scores and max score
* should be computed. Finds
* the top <code>n</code> hits for <code>query</code>, applying
* <code>filter</code> if non-null, and sorting the hits by the criteria in
* <code>sort</code>. If <code>doDocScores</code> is <code>true</code>
* the top <code>n</code> hits for <code>query</code>, and sorting
* the hits by the criteria in <code>sort</code>.
* If <code>doDocScores</code> is <code>true</code>
* then the score of each hit will be computed and
* returned. If <code>doMaxScore</code> is
* <code>true</code> then the maximum score over all
@ -372,37 +306,21 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopFieldDocs search(Query query, Filter filter, int n,
Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
return searchAfter(null, query, filter, n, sort, doDocScores, doMaxScore);
}
/** Finds the top <code>n</code>
* hits for <code>query</code>, applying <code>filter</code> if non-null,
* where all results are after a previous result (<code>after</code>).
* <p>
* By passing the bottom result from a previous page as <code>after</code>,
* this method can be used for efficient 'deep-paging' across potentially
* large result sets.
*
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
return searchAfter(after, query, filter, n, sort, false, false);
public TopFieldDocs search(Query query, int n,
Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
return searchAfter(null, query, n, sort, doDocScores, doMaxScore);
}
/**
* Search implementation with arbitrary sorting and no filter.
* Search implementation with arbitrary sorting.
* @param query The query to search for
* @param n Return only the top n results
* @param sort The {@link org.apache.lucene.search.Sort} object
* @return The top docs, sorted according to the supplied {@link org.apache.lucene.search.Sort} instance
* @throws IOException if there is a low-level I/O error
*/
public TopFieldDocs search(Query query, int n,
Sort sort) throws IOException {
return search(query, null, n, sort, false, false);
public TopFieldDocs search(Query query, int n, Sort sort) throws IOException {
return searchAfter(null, query, n, sort, false, false);
}
/** Finds the top <code>n</code>
@ -417,7 +335,7 @@ public class IndexSearcher {
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException {
return searchAfter(after, query, null, n, sort, false, false);
return searchAfter(after, query, n, sort, false, false);
}
/** Finds the top <code>n</code>
@ -436,14 +354,14 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int numHits, Sort sort,
public TopFieldDocs searchAfter(ScoreDoc after, Query query, int numHits, Sort sort,
boolean doDocScores, boolean doMaxScore) throws IOException {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
return searchAfter((FieldDoc) after, wrapFilter(query, filter), numHits, sort, doDocScores, doMaxScore);
return searchAfter((FieldDoc) after, query, numHits, sort, doDocScores, doMaxScore);
}
private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort sort,
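The removed searchAfter(after, query, filter, n) overloads documented the 'deep paging' idiom; that idiom is unchanged with the surviving overloads, with any filter folded into the query beforehand as in the sketch after the CHANGES hunk. A rough sketch, assuming searcher and query are in scope and using an arbitrary page size of 20.

ScoreDoc after = null;
while (true) {
  TopDocs page = searcher.searchAfter(after, query, 20);  // sorted variant: searchAfter(after, query, 20, sort)
  if (page.scoreDocs.length == 0) {
    break;                                                // no more results
  }
  for (ScoreDoc sd : page.scoreDocs) {
    StoredDocument doc = searcher.doc(sd.doc);            // process each hit
  }
  after = page.scoreDocs[page.scoreDocs.length - 1];      // bottom hit of this page seeds the next
}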


@ -22,7 +22,6 @@ import org.apache.lucene.util.PriorityQueue;
import java.io.IOException;
/** Represents hits returned by {@link
* IndexSearcher#search(Query,Filter,int)} and {@link
* IndexSearcher#search(Query,int)}. */
public class TopDocs {


@ -19,7 +19,7 @@ package org.apache.lucene.search;
/** Represents hits returned by {@link
* IndexSearcher#search(Query,Filter,int,Sort)}.
* IndexSearcher#search(Query,int,Sort)}.
*/
public class TopFieldDocs extends TopDocs {


@ -40,8 +40,7 @@
* on implementing your own Query class, see <a href="#customQueriesExpert">Custom Queries -- Expert Level</a> below.
* <p>
* To perform a search, applications usually call {@link
* org.apache.lucene.search.IndexSearcher#search(Query,int)} or {@link
* org.apache.lucene.search.IndexSearcher#search(Query,Filter,int)}.
* org.apache.lucene.search.IndexSearcher#search(Query,int)}.
* <p>
* Once a Query has been created and submitted to the {@link org.apache.lucene.search.IndexSearcher IndexSearcher}, the scoring
* process begins. After some infrastructure setup, control finally passes to the {@link org.apache.lucene.search.Weight Weight}
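The package overview now points at the single search(Query, int) entry point. A bare-bones sketch of that flow, with an illustrative field name and term; it is essentially what the TestDemo change below exercises.

Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = searcher.search(query, 10);
for (ScoreDoc sd : hits.scoreDocs) {
  StoredDocument doc = searcher.doc(sd.doc);  // stored fields of each matching document
}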


@ -61,7 +61,7 @@ public class TestDemo extends LuceneTestCase {
assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits);
Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1);
TopDocs hits = isearcher.search(query, 1);
assertEquals(1, hits.totalHits);
// Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) {
@ -73,7 +73,7 @@ public class TestDemo extends LuceneTestCase {
PhraseQuery phraseQuery = new PhraseQuery();
phraseQuery.add(new Term("fieldname", "to"));
phraseQuery.add(new Term("fieldname", "be"));
assertEquals(1, isearcher.search(phraseQuery, null, 1).totalHits);
assertEquals(1, isearcher.search(phraseQuery, 1).totalHits);
ireader.close();
directory.close();


@ -57,7 +57,7 @@ public class TestSearch extends LuceneTestCase {
try {
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
assertTrue("score is not negative: " + hits[0].score,
hits[0].score < 0);
@ -147,7 +147,7 @@ public class TestSearch extends LuceneTestCase {
System.out.println("TEST: query=" + query);
}
hits = searcher.search(query, null, 1000, sort).scoreDocs;
hits = searcher.search(query, 1000, sort).scoreDocs;
out.println(hits.length + " total results");
for (int i = 0 ; i < hits.length && i < 10; i++) {


@ -117,7 +117,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
final Sort sort = new Sort(SortField.FIELD_SCORE,
new SortField(ID_FIELD, SortField.Type.INT));
ScoreDoc[] hits = searcher.search(query, null, MAX_DOCS, sort).scoreDocs;
ScoreDoc[] hits = searcher.search(query, MAX_DOCS, sort).scoreDocs;
printHits(out, hits, searcher);
checkHits(hits, MAX_DOCS, searcher);
@ -130,7 +130,7 @@ public class TestSearchForDuplicates extends LuceneTestCase {
booleanQuery.add(new TermQuery(new Term(PRIORITY_FIELD, MED_PRIORITY)), BooleanClause.Occur.SHOULD);
out.println("Query: " + booleanQuery.toString(PRIORITY_FIELD));
hits = searcher.search(booleanQuery, null, MAX_DOCS, sort).scoreDocs;
hits = searcher.search(booleanQuery, MAX_DOCS, sort).scoreDocs;
printHits(out, hits, searcher);
checkHits(hits, MAX_DOCS, searcher);


@ -107,7 +107,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase {
assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits);
Query query = new TermQuery(new Term("fieldname", "text"));
TopDocs hits = isearcher.search(query, null, 1);
TopDocs hits = isearcher.search(query, 1);
assertEquals(1, hits.totalHits);
// Iterate through the results:
for (int i = 0; i < hits.scoreDocs.length; i++) {


@ -218,7 +218,7 @@ public class TestDocument extends LuceneTestCase {
Query query = new TermQuery(new Term("keyword", "test1"));
// ensure that queries return expected results without DateFilter first
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
doAssert(searcher.doc(hits[0].doc));
@ -250,7 +250,7 @@ public class TestDocument extends LuceneTestCase {
query.add(new Term("indexed_not_tokenized", "test1"));
query.add(new Term("indexed_not_tokenized", "test2"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
doAssert(searcher.doc(hits[0].doc));
@ -332,7 +332,7 @@ public class TestDocument extends LuceneTestCase {
Query query = new TermQuery(new Term("keyword", "test"));
// ensure that queries return expected results without DateFilter first
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
int result = 0;
for (int i = 0; i < 3; i++) {


@ -304,17 +304,6 @@ public class TestCodecs extends LuceneTestCase {
dir.close();
}
private ScoreDoc[] search(final IndexWriter writer, final Query q, final int n) throws IOException {
final IndexReader reader = writer.getReader();
final IndexSearcher searcher = newSearcher(reader);
try {
return searcher.search(q, null, n).scoreDocs;
}
finally {
reader.close();
}
}
private class Verify extends Thread {
final Fields termsDict;
final FieldData[] fields;


@ -672,7 +672,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
writer.close();
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(16, hits.length);
reader.close();
@ -690,7 +690,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
IndexReader rwReader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(rwReader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// Simplistic check: just verify only the past N segments_N's still
@ -708,7 +708,7 @@ public class TestDeletionPolicy extends LuceneTestCase {
// Work backwards in commits on what the expected
// count should be.
searcher = newSearcher(reader);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(expectedCount, hits.length);
if (expectedCount == 0) {
expectedCount = 16;


@ -272,7 +272,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase {
IndexSearcher searcher = newSearcher(refreshed);
ScoreDoc[] hits = searcher.search(
new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
null, 1000).scoreDocs;
1000).scoreDocs;
if (hits.length > 0) {
searcher.doc(hits[0].doc);
}


@ -444,7 +444,7 @@ public class TestIndexWriter extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals(10, hits.length);
reader.close();
@ -466,7 +466,7 @@ public class TestIndexWriter extends LuceneTestCase {
writer.close();
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals(27, hits.length);
reader.close();


@ -52,7 +52,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
Term searchTerm = new Term("content", "aaa");
DirectoryReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("first number of hits", 14, hits.length);
reader.close();
@ -65,7 +65,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
IndexReader r = DirectoryReader.open(dir);
searcher = newSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
r.close();
assertTrue("reader should have still been current", reader.isCurrent());
@ -77,7 +77,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
searcher = newSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("reader did not see changes after writer was closed", 47, hits.length);
r.close();
reader.close();
@ -108,7 +108,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
Term searchTerm = new Term("content", "aaa");
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("first number of hits", 14, hits.length);
reader.close();
@ -123,7 +123,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
reader.close();
@ -134,7 +134,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("saw changes after writer.abort", 14, hits.length);
reader.close();
@ -156,7 +156,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
}
IndexReader r = DirectoryReader.open(dir);
searcher = newSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("reader incorrectly sees changes from writer", 14, hits.length);
r.close();
}
@ -164,7 +164,7 @@ public class TestIndexWriterCommit extends LuceneTestCase {
writer.close();
IndexReader r = DirectoryReader.open(dir);
searcher = newSearcher(r);
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("didn't see changes after close", 218, hits.length);
r.close();


@ -473,7 +473,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
private int getHitCount(Directory dir, Term term) throws IOException {
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
int hitCount = searcher.search(new TermQuery(term), null, 1000).totalHits;
int hitCount = searcher.search(new TermQuery(term), 1000).totalHits;
reader.close();
return hitCount;
}
@ -656,7 +656,7 @@ public class TestIndexWriterDelete extends LuceneTestCase {
IndexSearcher searcher = newSearcher(newReader);
ScoreDoc[] hits = null;
try {
hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
}
catch (IOException e) {
e.printStackTrace();


@ -67,7 +67,7 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase {
assertEquals(IndexWriter.MAX_DOCS, hits.totalHits);
// Sort by docID reversed:
hits = searcher.search(new TermQuery(new Term("field", "text")), null, 10, new Sort(new SortField(null, SortField.Type.DOC, true)));
hits = searcher.search(new TermQuery(new Term("field", "text")), 10, new Sort(new SortField(null, SortField.Type.DOC, true)));
assertEquals(IndexWriter.MAX_DOCS, hits.totalHits);
assertEquals(10, hits.scoreDocs.length);
assertEquals(IndexWriter.MAX_DOCS-1, hits.scoreDocs[0].doc);


@ -202,7 +202,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
assertEquals("first docFreq", 57, reader.docFreq(searchTerm));
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs;
assertEquals("first number of hits", 57, hits.length);
reader.close();
@ -396,7 +396,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase {
searcher = newSearcher(reader);
try {
hits = searcher.search(new TermQuery(searchTerm), null, END_COUNT).scoreDocs;
hits = searcher.search(new TermQuery(searchTerm), END_COUNT).scoreDocs;
} catch (IOException e) {
e.printStackTrace(System.out);
fail(testName + ": exception when searching: " + e);


@ -114,7 +114,7 @@ public class TestLazyProxSkipping extends LuceneTestCase {
PhraseQuery pq = new PhraseQuery();
pq.add(new Term(this.field, this.term1));
pq.add(new Term(this.field, this.term2));
return this.searcher.search(pq, null, 1000).scoreDocs;
return this.searcher.search(pq, 1000).scoreDocs;
}
private void performTest(int numHits) throws IOException {


@ -107,7 +107,7 @@ public class TestManyFields extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
int totalHits = searcher.search(new TermQuery(new Term("field", "aaa")), null, 1).totalHits;
int totalHits = searcher.search(new TermQuery(new Term("field", "aaa")), 1).totalHits;
assertEquals(n*100, totalHits);
reader.close();


@ -402,8 +402,8 @@ public class TestParallelCompositeReader extends LuceneTestCase {
}
private void queryTest(Query query) throws IOException {
ScoreDoc[] parallelHits = parallel.search(query, null, 1000).scoreDocs;
ScoreDoc[] singleHits = single.search(query, null, 1000).scoreDocs;
ScoreDoc[] parallelHits = parallel.search(query, 1000).scoreDocs;
ScoreDoc[] singleHits = single.search(query, 1000).scoreDocs;
assertEquals(parallelHits.length, singleHits.length);
for(int i = 0; i < parallelHits.length; i++) {
assertEquals(parallelHits[i].score, singleHits[i].score, 0.001f);


@ -244,8 +244,8 @@ public class TestParallelLeafReader extends LuceneTestCase {
}
private void queryTest(Query query) throws IOException {
ScoreDoc[] parallelHits = parallel.search(query, null, 1000).scoreDocs;
ScoreDoc[] singleHits = single.search(query, null, 1000).scoreDocs;
ScoreDoc[] parallelHits = parallel.search(query, 1000).scoreDocs;
ScoreDoc[] singleHits = single.search(query, 1000).scoreDocs;
assertEquals(parallelHits.length, singleHits.length);
for(int i = 0; i < parallelHits.length; i++) {
assertEquals(parallelHits[i].score, singleHits[i].score, 0.001f);


@ -132,11 +132,11 @@ public class TestBoolean2 extends LuceneTestCase {
// sometimes return a default impl around the scorer so that we can
// compare BS1 and BS2
TopScoreDocCollector collector = TopScoreDocCollector.create(1000);
searcher.search(query, null, collector);
searcher.search(query, collector);
ScoreDoc[] hits1 = collector.topDocs().scoreDocs;
collector = TopScoreDocCollector.create(1000);
searcher.search(query, null, collector);
searcher.search(query, collector);
ScoreDoc[] hits2 = collector.topDocs().scoreDocs;
assertEquals(mulFactor * collector.totalHits,
@ -285,13 +285,13 @@ public class TestBoolean2 extends LuceneTestCase {
TopFieldCollector collector = TopFieldCollector.create(sort, 1000,
false, true, true);
searcher.search(q1, null, collector);
searcher.search(q1, collector);
ScoreDoc[] hits1 = collector.topDocs().scoreDocs;
collector = TopFieldCollector.create(sort, 1000,
false, true, true);
searcher.search(q1, null, collector);
searcher.search(q1, collector);
ScoreDoc[] hits2 = collector.topDocs().scoreDocs;
tot+=hits2.length;
CheckHits.checkEqual(q1, hits1, hits2);


@ -87,7 +87,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
public void verifyNrHits(Query q, int expected) throws Exception {
// bs1
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
if (expected != h.length) {
printHits(getTestName(), h, s);
}
@ -349,8 +349,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
// Can't use Hits because normalized scores will mess things
// up. The non-sorting version of search() that returns TopDocs
// will not normalize scores.
TopDocs top1 = s.search(q1,null,100);
TopDocs top2 = s.search(q2,null,100);
TopDocs top1 = s.search(q1,100);
TopDocs top2 = s.search(q2,100);
if (i < 100) {
QueryUtils.check(random(), q1,s);
QueryUtils.check(random(), q2,s);
@ -410,8 +410,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
BooleanQuery q2 = new BooleanQuery();
q2.add(new TermQuery(new Term("data", "1")), BooleanClause.Occur.SHOULD);
q2.setMinimumNumberShouldMatch(1);
TopDocs top1 = s.search(q1,null,100);
TopDocs top2 = s.search(q2,null,100);
TopDocs top1 = s.search(q1,100);
TopDocs top2 = s.search(q2,100);
assertSubsetOfSameScores(q2, top1, top2);
} finally {
s.setSimilarity(oldSimilarity);
@ -432,8 +432,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
BooleanQuery q2 = new BooleanQuery();
q2.add(new TermQuery(new Term("data", "1")), BooleanClause.Occur.SHOULD);
q2.add(new TermQuery(new Term("data", "Z")), BooleanClause.Occur.MUST_NOT);
TopDocs top1 = s.search(q1,null,100);
TopDocs top2 = s.search(q2,null,100);
TopDocs top1 = s.search(q1,100);
TopDocs top2 = s.search(q2,100);
assertSubsetOfSameScores(q2, top1, top2);
} finally {
s.setSimilarity(oldSimilarity);


@ -52,7 +52,7 @@ public class TestBooleanOr extends LuceneTestCase {
private int search(Query q) throws IOException {
QueryUtils.check(random(), q,searcher);
return searcher.search(q, null, 1000).totalHits;
return searcher.search(q, 1000).totalHits;
}
public void testElements() throws IOException {


@ -55,7 +55,7 @@ public class TestBooleanScorer extends LuceneTestCase {
query.add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT);
IndexSearcher indexSearcher = newSearcher(ir);
ScoreDoc[] hits = indexSearcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = indexSearcher.search(query, 1000).scoreDocs;
assertEquals("Number of matched documents", 2, hits.length);
ir.close();
directory.close();


@ -71,16 +71,16 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
IOUtils.close(ir, dir);
super.tearDown();
}
private void assertFilterEquals(Filter f1, Filter f2) throws Exception {
Query query = new MatchAllDocsQuery();
TopDocs hits1 = is.search(query, f1, ir.maxDoc());
TopDocs hits2 = is.search(query, f2, ir.maxDoc());
TopDocs hits1 = is.search(new FilteredQuery(query, f1), ir.maxDoc());
TopDocs hits2 = is.search(new FilteredQuery(query, f2), ir.maxDoc());
assertEquals(hits1.totalHits, hits2.totalHits);
CheckHits.checkEqual(query, hits1.scoreDocs, hits2.scoreDocs);
// now do it again to confirm caching works
TopDocs hits3 = is.search(query, f1, ir.maxDoc());
TopDocs hits4 = is.search(query, f2, ir.maxDoc());
TopDocs hits3 = is.search(new FilteredQuery(query, f1), ir.maxDoc());
TopDocs hits4 = is.search(new FilteredQuery(query, f2), ir.maxDoc());
assertEquals(hits3.totalHits, hits4.totalHits);
CheckHits.checkEqual(query, hits3.scoreDocs, hits4.scoreDocs);
}
@ -319,7 +319,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
CachingWrapperFilter filter = new CachingWrapperFilter(startFilter, FilterCachingPolicy.ALWAYS_CACHE);
docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1);
assertTrue(filter.ramBytesUsed() > 0);
assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits);
@ -356,7 +356,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
searcher = newSearcher(reader, false);
missCount = filter.missCount;
docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1);
assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits);
// cache hit
@ -370,7 +370,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
reader = refreshReader(reader);
searcher = newSearcher(reader, false);
docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1);
assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits);
missCount = filter.missCount;
assertTrue(missCount > 0);
@ -389,7 +389,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
reader = refreshReader(reader);
searcher = newSearcher(reader, false);
docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1);
assertEquals("[query + filter] Should find 2 hits...", 2, docs.totalHits);
assertTrue(filter.missCount > missCount);
missCount = filter.missCount;
@ -405,7 +405,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase {
reader = refreshReader(reader);
searcher = newSearcher(reader, false);
docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1);
assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits);
// CWF reused the same entry (it dynamically applied the deletes):
assertEquals(missCount, filter.missCount);


@ -144,12 +144,12 @@ public class TestConstantScoreQuery extends LuceneTestCase {
Query query = new ConstantScoreQuery(filterB);
IndexSearcher s = newSearcher(r);
assertEquals(1, s.search(query, filterB, 1).totalHits); // Query for field:b, Filter field:b
assertEquals(1, s.search(new FilteredQuery(query, filterB), 1).totalHits); // Query for field:b, Filter field:b
Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a"))));
query = new ConstantScoreQuery(filterA);
assertEquals(0, s.search(query, filterB, 1).totalHits); // Query field:b, Filter field:a
assertEquals(0, s.search(new FilteredQuery(query, filterB), 1).totalHits); // Query field:b, Filter field:a
r.close();
d.close();


@ -112,7 +112,7 @@ public class TestCustomSearcherSort extends LuceneTestCase {
// make sure the documents returned by the search match the expected list
private void matchHits(IndexSearcher searcher, Sort sort) throws IOException {
// make a query without sorting first
ScoreDoc[] hitsByRank = searcher.search(query, null, Integer.MAX_VALUE).scoreDocs;
ScoreDoc[] hitsByRank = searcher.search(query, Integer.MAX_VALUE).scoreDocs;
checkHits(hitsByRank, "Sort by rank: "); // check for duplicates
Map<Integer,Integer> resultMap = new TreeMap<>();
// store hits in TreeMap - TreeMap does not allow duplicates; existing
@ -124,7 +124,7 @@ public class TestCustomSearcherSort extends LuceneTestCase {
}
// now make a query using the sort criteria
ScoreDoc[] resultSort = searcher.search(query, null, Integer.MAX_VALUE,
ScoreDoc[] resultSort = searcher.search(query, Integer.MAX_VALUE,
sort).scoreDocs;
checkHits(resultSort, "Sort by custom criteria: "); // check for duplicates
@ -192,23 +192,23 @@ public class TestCustomSearcherSort extends LuceneTestCase {
}
@Override
public TopFieldDocs search(Query query, Filter filter, int nDocs, Sort sort)
public TopFieldDocs search(Query query, int nDocs, Sort sort)
throws IOException {
BooleanQuery bq = new BooleanQuery();
bq.add(query, BooleanClause.Occur.MUST);
bq.add(new TermQuery(new Term("mandant", Integer.toString(switcher))),
BooleanClause.Occur.MUST);
return super.search(bq, filter, nDocs, sort);
return super.search(bq, nDocs, sort);
}
@Override
public TopDocs search(Query query, Filter filter, int nDocs)
public TopDocs search(Query query, int nDocs)
throws IOException {
BooleanQuery bq = new BooleanQuery();
bq.add(query, BooleanClause.Occur.MUST);
bq.add(new TermQuery(new Term("mandant", Integer.toString(switcher))),
BooleanClause.Occur.MUST);
return super.search(bq, filter, nDocs);
return super.search(bq, nDocs);
}
}


@ -76,23 +76,23 @@ public class TestDateFilter extends LuceneTestCase {
ScoreDoc[] result;
// ensure that queries return expected results without DateFilter first
result = searcher.search(query1, null, 1000).scoreDocs;
result = searcher.search(query1, 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query2, null, 1000).scoreDocs;
result = searcher.search(query2, 1000).scoreDocs;
assertEquals(1, result.length);
// run queries with DateFilter
result = searcher.search(query1, df1, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query1, df1), 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query1, df2, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query1, df2), 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query2, df1, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query2, df1), 1000).scoreDocs;
assertEquals(1, result.length);
result = searcher.search(query2, df2, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query2, df2), 1000).scoreDocs;
assertEquals(0, result.length);
reader.close();
indexStore.close();
@ -140,23 +140,23 @@ public class TestDateFilter extends LuceneTestCase {
ScoreDoc[] result;
// ensure that queries return expected results without DateFilter first
result = searcher.search(query1, null, 1000).scoreDocs;
result = searcher.search(query1, 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query2, null, 1000).scoreDocs;
result = searcher.search(query2, 1000).scoreDocs;
assertEquals(1, result.length);
// run queries with DateFilter
result = searcher.search(query1, df1, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query1, df1), 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query1, df2, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query1, df2), 1000).scoreDocs;
assertEquals(0, result.length);
result = searcher.search(query2, df1, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query2, df1), 1000).scoreDocs;
assertEquals(1, result.length);
result = searcher.search(query2, df2, 1000).scoreDocs;
result = searcher.search(new FilteredQuery(query2, df2), 1000).scoreDocs;
assertEquals(0, result.length);
reader.close();
indexStore.close();


@ -83,7 +83,7 @@ public class TestDateSort extends LuceneTestCase {
// Execute the search and process the search results.
String[] actualOrder = new String[5];
ScoreDoc[] hits = searcher.search(query, null, 1000, sort).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000, sort).scoreDocs;
for (int i = 0; i < hits.length; i++) {
StoredDocument document = searcher.doc(hits[i].doc);
String text = document.get(TEXT_FIELD);


@ -209,7 +209,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q.add(tq("hed", "elephant"));
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("all docs should match " + q.toString(), 4, h.length);
@ -233,7 +233,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q.add(tq("dek", "elephant"));
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
@ -258,7 +258,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q.add(tq("dek", "elephant"));
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("all docs should match " + q.toString(), 4, h.length);
@ -281,7 +281,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
q.add(tq("dek", "elephant"));
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
@ -320,7 +320,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("3 docs should match " + q.toString(), 3, h.length);
@ -352,7 +352,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
}
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
assertEquals("4 docs should match " + q.toString(), 4, h.length);
@ -388,7 +388,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
}
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {
@ -442,7 +442,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
}
QueryUtils.check(random(), q, s);
ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(q, 1000).scoreDocs;
try {


@ -133,7 +133,7 @@ public class TestDocIdSet extends LuceneTestCase {
}
};
Assert.assertEquals(0, searcher.search(new MatchAllDocsQuery(), f, 10).totalHits);
Assert.assertEquals(0, searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), 10).totalHits);
reader.close();
dir.close();
}
@ -179,7 +179,7 @@ public class TestDocIdSet extends LuceneTestCase {
}
};
Assert.assertEquals(0, searcher.search(new MatchAllDocsQuery(), f, 10).totalHits);
Assert.assertEquals(0, searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), 10).totalHits);
reader.close();
dir.close();
}


@ -80,7 +80,7 @@ public class TestElevationComparator extends LuceneTestCase {
);
TopDocsCollector<Entry> topCollector = TopFieldCollector.create(sort, 50, false, true, true);
searcher.search(newq, null, topCollector);
searcher.search(newq, topCollector);
TopDocs topDocs = topCollector.topDocs(0, 10);
int nDocsReturned = topDocs.scoreDocs.length;


@ -21,7 +21,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
@ -57,18 +56,18 @@ public class TestFieldCacheTermsFilter extends LuceneTestCase {
List<String> terms = new ArrayList<>();
terms.add("5");
results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs;
results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs;
assertEquals("Must match nothing", 0, results.length);
terms = new ArrayList<>();
terms.add("10");
results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs;
results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs;
assertEquals("Must match 1", 1, results.length);
terms = new ArrayList<>();
terms.add("10");
terms.add("20");
results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs;
results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs;
assertEquals("Must match 2", 2, results.length);
reader.close();


@ -136,33 +136,33 @@ public class TestFilteredQuery extends LuceneTestCase {
private void tFilteredQuery(final boolean useRandomAccess) throws Exception {
Query filteredquery = new FilteredQuery(query, filter, randomFilterStrategy(random(), useRandomAccess));
ScoreDoc[] hits = searcher.search (filteredquery, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search (filteredquery, 1000).scoreDocs;
assertEquals (1, hits.length);
assertEquals (1, hits[0].doc);
QueryUtils.check(random(), filteredquery,searcher);
hits = searcher.search (filteredquery, null, 1000, new Sort(new SortField("sorter", SortField.Type.STRING))).scoreDocs;
hits = searcher.search (filteredquery, 1000, new Sort(new SortField("sorter", SortField.Type.STRING))).scoreDocs;
assertEquals (1, hits.length);
assertEquals (1, hits[0].doc);
filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "one")), filter, randomFilterStrategy(random(), useRandomAccess));
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
hits = searcher.search (filteredquery, 1000).scoreDocs;
assertEquals (2, hits.length);
QueryUtils.check(random(), filteredquery,searcher);
filteredquery = new FilteredQuery(new MatchAllDocsQuery(), filter, randomFilterStrategy(random(), useRandomAccess));
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
hits = searcher.search (filteredquery, 1000).scoreDocs;
assertEquals (2, hits.length);
QueryUtils.check(random(), filteredquery,searcher);
filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "x")), filter, randomFilterStrategy(random(), useRandomAccess));
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
hits = searcher.search (filteredquery, 1000).scoreDocs;
assertEquals (1, hits.length);
assertEquals (3, hits[0].doc);
QueryUtils.check(random(), filteredquery,searcher);
filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "y")), filter, randomFilterStrategy(random(), useRandomAccess));
hits = searcher.search (filteredquery, null, 1000).scoreDocs;
hits = searcher.search (filteredquery, 1000).scoreDocs;
assertEquals (0, hits.length);
QueryUtils.check(random(), filteredquery,searcher);
@ -209,8 +209,8 @@ public class TestFilteredQuery extends LuceneTestCase {
* Tests whether the scores of the two queries are the same.
*/
public void assertScoreEquals(Query q1, Query q2) throws Exception {
ScoreDoc[] hits1 = searcher.search (q1, null, 1000).scoreDocs;
ScoreDoc[] hits2 = searcher.search (q2, null, 1000).scoreDocs;
ScoreDoc[] hits1 = searcher.search (q1, 1000).scoreDocs;
ScoreDoc[] hits2 = searcher.search (q2, 1000).scoreDocs;
assertEquals(hits1.length, hits2.length);
@ -233,7 +233,7 @@ public class TestFilteredQuery extends LuceneTestCase {
"sorter", "b", "d", true, true);
Query filteredquery = new FilteredQuery(rq, filter, randomFilterStrategy(random(), useRandomAccess));
ScoreDoc[] hits = searcher.search(filteredquery, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(filteredquery, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(random(), filteredquery,searcher);
}
@ -251,7 +251,7 @@ public class TestFilteredQuery extends LuceneTestCase {
bq.add(query, BooleanClause.Occur.MUST);
query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), randomFilterStrategy(random(), useRandomAccess));
bq.add(query, BooleanClause.Occur.MUST);
ScoreDoc[] hits = searcher.search(bq, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(bq, 1000).scoreDocs;
assertEquals(0, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -269,7 +269,7 @@ public class TestFilteredQuery extends LuceneTestCase {
bq.add(query, BooleanClause.Occur.SHOULD);
query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), randomFilterStrategy(random(), useRandomAccess));
bq.add(query, BooleanClause.Occur.SHOULD);
ScoreDoc[] hits = searcher.search(bq, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(bq, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(random(), query,searcher);
}


@ -75,7 +75,7 @@ public class TestFilteredSearch extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(directory);
IndexSearcher indexSearcher = newSearcher(reader);
ScoreDoc[] hits = indexSearcher.search(booleanQuery, filter, 1000).scoreDocs;
ScoreDoc[] hits = indexSearcher.search(new FilteredQuery(booleanQuery, filter), 1000).scoreDocs;
assertEquals("Number of matched documents", 1, hits.length);
reader.close();
}


@ -48,7 +48,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
writer.close();
FuzzyQuery query = new FuzzyQuery(new Term("field", "abc"), FuzzyQuery.defaultMaxEdits, 1);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
reader.close();
directory.close();
@ -70,32 +70,32 @@ public class TestFuzzyQuery extends LuceneTestCase {
writer.close();
FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 0);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
// same with prefix
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 1);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 2);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 3);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 4);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(2, hits.length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 5);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 6);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
// test scoring
query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("3 documents should match", 3, hits.length);
List<String> order = Arrays.asList("bbbbb","abbbb","aabbb");
for (int i = 0; i < hits.length; i++) {
@ -107,7 +107,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
// test pq size by supplying maxExpansions=2
// This query would normally return 3 documents, because 3 terms match (see above):
query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMaxEdits, 0, 2, false);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("only 2 documents should match", 2, hits.length);
order = Arrays.asList("bbbbb","abbbb");
for (int i = 0; i < hits.length; i++) {
@ -118,15 +118,15 @@ public class TestFuzzyQuery extends LuceneTestCase {
// not similar enough:
query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMaxEdits, 0); // edit distance to "aaaaa" = 3
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// query identical to a word in the index:
query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
// default allows for up to two edits:
@ -135,7 +135,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
// query similar to a word in the index:
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab"));
@ -143,63 +143,63 @@ public class TestFuzzyQuery extends LuceneTestCase {
// now with prefix
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 1);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab"));
assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 2);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab"));
assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 3);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab"));
assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 4);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(2, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa"));
assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab"));
query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 5);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd"));
// now with prefix
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 1);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 2);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 3);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 4);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd"));
query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 5);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// different field = no match:
query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMaxEdits, 0);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
reader.close();
@ -234,7 +234,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
FuzzyQuery query = new FuzzyQuery(new Term("field", "WEBER"), 2, 1);
//query.setRewriteMethod(FuzzyQuery.SCORING_BOOLEAN_QUERY_REWRITE);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(8, hits.length);
reader.close();
@ -296,7 +296,7 @@ public class TestFuzzyQuery extends LuceneTestCase {
FuzzyQuery query = new FuzzyQuery(new Term("field", "lucene"));
query.setRewriteMethod(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(50));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
// normally, 'Lucenne' would be the first result as IDF will skew the score.
assertEquals("Lucene", reader.document(hits[0].doc).get("field"));

View File

@ -81,10 +81,6 @@ public class TestIndexSearcher extends LuceneTestCase {
null,
new Sort(new SortField("field2", SortField.Type.STRING))
};
Filter filters[] = new Filter[] {
null,
new QueryWrapperFilter(new TermQuery(new Term("field2", "true")))
};
ScoreDoc afters[] = new ScoreDoc[] {
null,
new FieldDoc(0, 0f, new Object[] { new BytesRef("boo!") })
@ -94,24 +90,19 @@ public class TestIndexSearcher extends LuceneTestCase {
for (ScoreDoc after : afters) {
for (Query query : queries) {
for (Sort sort : sorts) {
for (Filter filter : filters) {
searcher.search(query, Integer.MAX_VALUE);
searcher.searchAfter(after, query, Integer.MAX_VALUE);
searcher.search(query, filter, Integer.MAX_VALUE);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE);
if (sort != null) {
searcher.search(query, Integer.MAX_VALUE, sort);
searcher.search(query, filter, Integer.MAX_VALUE, sort);
searcher.search(query, filter, Integer.MAX_VALUE, sort, true, true);
searcher.search(query, filter, Integer.MAX_VALUE, sort, true, false);
searcher.search(query, filter, Integer.MAX_VALUE, sort, false, true);
searcher.search(query, filter, Integer.MAX_VALUE, sort, false, false);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, true, true);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, true, false);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, false, true);
searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, false, false);
}
searcher.search(query, Integer.MAX_VALUE);
searcher.searchAfter(after, query, Integer.MAX_VALUE);
if (sort != null) {
searcher.search(query, Integer.MAX_VALUE, sort);
searcher.search(query, Integer.MAX_VALUE, sort, true, true);
searcher.search(query, Integer.MAX_VALUE, sort, true, false);
searcher.search(query, Integer.MAX_VALUE, sort, false, true);
searcher.search(query, Integer.MAX_VALUE, sort, false, false);
searcher.searchAfter(after, query, Integer.MAX_VALUE, sort);
searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, true, true);
searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, true, false);
searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, false, true);
searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, false, false);
}
}
}
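The loop deleted above exercised the Filter-taking sorted and paged overloads. If equivalent coverage were wanted after this change, the QueryWrapperFilter that the removed array held could be folded into the query before calling the overloads that remain. A hedged sketch, not part of the commit and with illustrative names:

    import java.io.IOException;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.FilteredQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.TermQuery;

    final class FilteredVariantsSketch {
      private FilteredVariantsSketch() {}

      // Same filter the removed array contained, applied through FilteredQuery so
      // that only the remaining (Filter-free) overloads are needed.
      static void searchFilteredVariants(IndexSearcher searcher, Query query, ScoreDoc after, Sort sort) throws IOException {
        Filter filter = new QueryWrapperFilter(new TermQuery(new Term("field2", "true")));
        Query filtered = new FilteredQuery(query, filter);
        searcher.search(filtered, Integer.MAX_VALUE);
        searcher.searchAfter(after, filtered, Integer.MAX_VALUE);
        if (sort != null) {
          searcher.search(filtered, Integer.MAX_VALUE, sort, true, true);
          searcher.searchAfter(after, filtered, Integer.MAX_VALUE, sort, true, true);
        }
      }
    }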

View File

@ -54,7 +54,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
IndexSearcher is = newSearcher(ir);
ScoreDoc[] hits;
hits = is.search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
hits = is.search(new MatchAllDocsQuery(), 1000).scoreDocs;
assertEquals(3, hits.length);
assertEquals("one", is.doc(hits[0].doc).get("key"));
assertEquals("two", is.doc(hits[1].doc).get("key"));
@ -65,13 +65,13 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
hits = is.search(bq, null, 1000).scoreDocs;
hits = is.search(bq, 1000).scoreDocs;
assertEquals(3, hits.length);
bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
bq.add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST);
hits = is.search(bq, null, 1000).scoreDocs;
hits = is.search(bq, 1000).scoreDocs;
assertEquals(1, hits.length);
iw.deleteDocuments(new Term("key", "one"));
@ -79,7 +79,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
ir = DirectoryReader.open(iw, true);
is = newSearcher(ir);
hits = is.search(new MatchAllDocsQuery(), null, 1000).scoreDocs;
hits = is.search(new MatchAllDocsQuery(), 1000).scoreDocs;
assertEquals(2, hits.length);
iw.close();

View File

@ -90,9 +90,9 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
.toString());
ScoreDoc[] result;
result = searcher.search(query1, null, 1000).scoreDocs;
result = searcher.search(query1, 1000).scoreDocs;
assertEquals(2, result.length);
result = searcher.search(query2, null, 1000).scoreDocs;
result = searcher.search(query2, 1000).scoreDocs;
assertEquals(0, result.length);
// search for "blue* pizza":
@ -110,13 +110,13 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
query3.add(termsWithPrefix.toArray(new Term[0]));
query3.add(new Term("body", "pizza"));
result = searcher.search(query3, null, 1000).scoreDocs;
result = searcher.search(query3, 1000).scoreDocs;
assertEquals(2, result.length); // blueberry pizza, bluebird pizza
assertEquals("body:\"(blueberry bluebird) pizza\"", query3.toString());
// test slop:
query3.setSlop(1);
result = searcher.search(query3, null, 1000).scoreDocs;
result = searcher.search(query3, 1000).scoreDocs;
// just make sure no exc:
searcher.explain(query3, 0);
@ -224,7 +224,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
q.add(trouble, BooleanClause.Occur.MUST);
// exception will be thrown here without fix
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals("Wrong number of hits", 2, hits.length);
@ -256,7 +256,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
q.add(trouble, BooleanClause.Occur.MUST);
// exception will be thrown here without fix for #35626:
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals("Wrong number of hits", 0, hits.length);
writer.close();
reader.close();
@ -275,7 +275,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
q.add(new Term("body", "a"));
q.add(new Term[] {new Term("body", "nope"), new Term("body", "nope")});
assertEquals("Wrong number of hits", 0,
searcher.search(q, null, 1).totalHits);
searcher.search(q, 1).totalHits);
// just make sure no exc:
searcher.explain(q, 0);

View File

@ -143,7 +143,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
// some hits match more terms than others, score should be the same
result = search.search(csrq("data", "1", "6", T, T), null, 1000).scoreDocs;
result = search.search(csrq("data", "1", "6", T, T), 1000).scoreDocs;
int numHits = result.length;
assertEquals("wrong number of results", 6, numHits);
float score = result[0].score;
@ -152,7 +152,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
result[i].score, SCORE_COMP_THRESH);
}
result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), null, 1000).scoreDocs;
result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), 1000).scoreDocs;
numHits = result.length;
assertEquals("wrong number of results", 6, numHits);
for (int i = 0; i < numHits; i++) {
@ -160,7 +160,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
result[i].score, SCORE_COMP_THRESH);
}
result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, 1000).scoreDocs;
result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), 1000).scoreDocs;
numHits = result.length;
assertEquals("wrong number of results", 6, numHits);
for (int i = 0; i < numHits; i++) {
@ -182,7 +182,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
BooleanQuery bq = new BooleanQuery();
bq.add(dummyTerm, BooleanClause.Occur.SHOULD); // hits one doc
bq.add(csrq("data", "#", "#", T, T), BooleanClause.Occur.SHOULD); // hits no docs
result = search.search(bq, null, 1000).scoreDocs;
result = search.search(bq, 1000).scoreDocs;
int numHits = result.length;
assertEquals("wrong number of results", 1, numHits);
float score = result[0].score;
@ -194,7 +194,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
bq = new BooleanQuery();
bq.add(dummyTerm, BooleanClause.Occur.SHOULD); // hits one doc
bq.add(csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), BooleanClause.Occur.SHOULD); // hits no docs
result = search.search(bq, null, 1000).scoreDocs;
result = search.search(bq, 1000).scoreDocs;
numHits = result.length;
assertEquals("wrong number of results", 1, numHits);
for (int i = 0; i < numHits; i++) {
@ -205,7 +205,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
bq = new BooleanQuery();
bq.add(dummyTerm, BooleanClause.Occur.SHOULD); // hits one doc
bq.add(csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), BooleanClause.Occur.SHOULD); // hits no docs
result = search.search(bq, null, 1000).scoreDocs;
result = search.search(bq, 1000).scoreDocs;
numHits = result.length;
assertEquals("wrong number of results", 1, numHits);
for (int i = 0; i < numHits; i++) {
@ -226,7 +226,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
search.setSimilarity(new DefaultSimilarity());
Query q = csrq("data", "1", "6", T, T);
q.setBoost(100);
search.search(q, null, new SimpleCollector() {
search.search(q, new SimpleCollector() {
private int base = 0;
private Scorer scorer;
@Override
@ -259,7 +259,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
bq.add(q1, BooleanClause.Occur.SHOULD);
bq.add(q2, BooleanClause.Occur.SHOULD);
ScoreDoc[] hits = search.search(bq, null, 1000).scoreDocs;
ScoreDoc[] hits = search.search(bq, 1000).scoreDocs;
Assert.assertEquals(1, hits[0].doc);
Assert.assertEquals(0, hits[1].doc);
assertTrue(hits[0].score > hits[1].score);
@ -271,7 +271,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
bq.add(q1, BooleanClause.Occur.SHOULD);
bq.add(q2, BooleanClause.Occur.SHOULD);
hits = search.search(bq, null, 1000).scoreDocs;
hits = search.search(bq, 1000).scoreDocs;
Assert.assertEquals(1, hits[0].doc);
Assert.assertEquals(0, hits[1].doc);
assertTrue(hits[0].score > hits[1].score);
@ -283,7 +283,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
bq.add(q1, BooleanClause.Occur.SHOULD);
bq.add(q2, BooleanClause.Occur.SHOULD);
hits = search.search(bq, null, 1000).scoreDocs;
hits = search.search(bq, 1000).scoreDocs;
Assert.assertEquals(0, hits[0].doc);
Assert.assertEquals(1, hits[1].doc);
assertTrue(hits[0].score > hits[1].score);
@ -300,7 +300,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
Query rq = TermRangeQuery.newStringRange("data", "1", "4", T, T);
ScoreDoc[] expected = search.search(rq, null, 1000).scoreDocs;
ScoreDoc[] expected = search.search(rq, 1000).scoreDocs;
int numHits = expected.length;
// now do a boolean query which also contains a
@ -310,7 +310,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
q.add(rq, BooleanClause.Occur.MUST);// T, F);
q.add(csrq("data", "1", "6", T, T), BooleanClause.Occur.MUST);// T, F);
ScoreDoc[] actual = search.search(q, null, 1000).scoreDocs;
ScoreDoc[] actual = search.search(q, 1000).scoreDocs;
assertEquals("wrong numebr of hits", numHits, actual.length);
for (int i = 0; i < numHits; i++) {
@ -344,110 +344,110 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
// test id, bounded on both ends
result = search.search(csrq("id", minIP, maxIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, T, T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(csrq("id", minIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(csrq("id", minIP, maxIP, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, T, F), numDocs).scoreDocs;
assertEquals("all but last", numDocs - 1, result.length);
result = search.search(csrq("id", minIP, maxIP, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("all but last", numDocs - 1, result.length);
result = search.search(csrq("id", minIP, maxIP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, F, T), numDocs).scoreDocs;
assertEquals("all but first", numDocs - 1, result.length);
result = search.search(csrq("id", minIP, maxIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("all but first", numDocs - 1, result.length);
result = search.search(csrq("id", minIP, maxIP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, F, F), numDocs).scoreDocs;
assertEquals("all but ends", numDocs - 2, result.length);
result = search.search(csrq("id", minIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("all but ends", numDocs - 2, result.length);
result = search.search(csrq("id", medIP, maxIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, maxIP, T, T), numDocs).scoreDocs;
assertEquals("med and up", 1 + maxId - medId, result.length);
result = search.search(csrq("id", medIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("med and up", 1 + maxId - medId, result.length);
result = search.search(csrq("id", minIP, medIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, medIP, T, T), numDocs).scoreDocs;
assertEquals("up to med", 1 + medId - minId, result.length);
result = search.search(csrq("id", minIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("up to med", 1 + medId - minId, result.length);
// unbounded id
result = search.search(csrq("id", minIP, null, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, null, T, F), numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
result = search.search(csrq("id", null, maxIP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", null, maxIP, F, T), numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
result = search.search(csrq("id", minIP, null, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, null, F, F), numDocs).scoreDocs;
assertEquals("not min, but up", numDocs - 1, result.length);
result = search.search(csrq("id", null, maxIP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", null, maxIP, F, F), numDocs).scoreDocs;
assertEquals("not max, but down", numDocs - 1, result.length);
result = search.search(csrq("id", medIP, maxIP, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, maxIP, T, F), numDocs).scoreDocs;
assertEquals("med and up, not max", maxId - medId, result.length);
result = search.search(csrq("id", minIP, medIP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, medIP, F, T), numDocs).scoreDocs;
assertEquals("not min, up to med", medId - minId, result.length);
// very small sets
result = search.search(csrq("id", minIP, minIP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, minIP, F, F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(csrq("id", minIP, minIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, minIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(csrq("id", medIP, medIP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, medIP, F, F), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(csrq("id", medIP, medIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, medIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(csrq("id", maxIP, maxIP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, maxIP, F, F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(csrq("id", maxIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(csrq("id", minIP, minIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, minIP, T, T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(csrq("id", minIP, minIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", minIP, minIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(csrq("id", null, minIP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", null, minIP, F, T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(csrq("id", null, minIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", null, minIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(csrq("id", maxIP, maxIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, maxIP, T, T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(csrq("id", maxIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(csrq("id", maxIP, null, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, null, T, F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(csrq("id", maxIP, null, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", maxIP, null, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(csrq("id", medIP, medIP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, medIP, T, T), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
result = search.search(csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs;
result = search.search(csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
}
@ -469,47 +469,47 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
// test extremes, bounded on both ends
result = search.search(csrq("rand", minRP, maxRP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, maxRP, T, T), numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(csrq("rand", minRP, maxRP, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, maxRP, T, F), numDocs).scoreDocs;
assertEquals("all but biggest", numDocs - 1, result.length);
result = search.search(csrq("rand", minRP, maxRP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, maxRP, F, T), numDocs).scoreDocs;
assertEquals("all but smallest", numDocs - 1, result.length);
result = search.search(csrq("rand", minRP, maxRP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, maxRP, F, F), numDocs).scoreDocs;
assertEquals("all but extremes", numDocs - 2, result.length);
// unbounded
result = search.search(csrq("rand", minRP, null, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, null, T, F), numDocs).scoreDocs;
assertEquals("smallest and up", numDocs, result.length);
result = search.search(csrq("rand", null, maxRP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", null, maxRP, F, T), numDocs).scoreDocs;
assertEquals("biggest and down", numDocs, result.length);
result = search.search(csrq("rand", minRP, null, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, null, F, F), numDocs).scoreDocs;
assertEquals("not smallest, but up", numDocs - 1, result.length);
result = search.search(csrq("rand", null, maxRP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", null, maxRP, F, F), numDocs).scoreDocs;
assertEquals("not biggest, but down", numDocs - 1, result.length);
// very small sets
result = search.search(csrq("rand", minRP, minRP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, minRP, F, F), numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(csrq("rand", maxRP, maxRP, F, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", maxRP, maxRP, F, F), numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(csrq("rand", minRP, minRP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", minRP, minRP, T, T), numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(csrq("rand", null, minRP, F, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", null, minRP, F, T), numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(csrq("rand", maxRP, maxRP, T, T), null, numDocs).scoreDocs;
result = search.search(csrq("rand", maxRP, maxRP, T, T), numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(csrq("rand", maxRP, null, T, F), null, numDocs).scoreDocs;
result = search.search(csrq("rand", maxRP, null, T, F), numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
}
}
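csrq is a local helper of TestMultiTermConstantScore whose body is not shown in this diff. To read the calls above, it can be assumed to build a constant-score string range query roughly like the following sketch (illustrative only, not the actual helper):

    import org.apache.lucene.search.MultiTermQuery;
    import org.apache.lucene.search.TermRangeQuery;

    final class CsrqSketch {
      private CsrqSketch() {}

      // Assumed shape of the test's csrq(...) helper: a string range query, with an
      // optional override of the rewrite method (null keeps the default).
      static TermRangeQuery csrq(String field, String lower, String upper,
                                 boolean includeLower, boolean includeUpper,
                                 MultiTermQuery.RewriteMethod rewrite) {
        TermRangeQuery query = TermRangeQuery.newStringRange(field, lower, upper, includeLower, includeUpper);
        if (rewrite != null) {
          query.setRewriteMethod(rewrite);
        }
        return query;
      }
    }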

View File

@ -80,7 +80,7 @@ public class TestNeedsScores extends LuceneTestCase {
Query query = new MatchAllDocsQuery();
Query term = new TermQuery(new Term("field", "this"));
Filter filter = new QueryWrapperFilter(new AssertNeedsScores(term, false));
assertEquals(5, searcher.search(query, filter, 5).totalHits);
assertEquals(5, searcher.search(new FilteredQuery(query, filter), 5).totalHits);
}
/** when not sorting by score */

View File

@ -48,7 +48,7 @@ public class TestNot extends LuceneTestCase {
query.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("field", "b")), BooleanClause.Occur.MUST_NOT);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
writer.close();
reader.close();

View File

@ -158,16 +158,16 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
case 0:
type = " (constant score filter rewrite)";
q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
break;
case 1:
type = " (constant score boolean rewrite)";
q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
break;
case 2:
type = " (filter)";
topDocs = searcher.search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), noDocs, Sort.INDEXORDER);
break;
default:
return;
@ -222,7 +222,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
int count=3000;
int upper=(count-1)*distance + (distance/3) + startOffset;
NumericRangeQuery<Integer> q=NumericRangeQuery.newIntRange(field, precisionStep, null, upper, true, true);
TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
@ -232,7 +232,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue());
q=NumericRangeQuery.newIntRange(field, precisionStep, null, upper, false, true);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
@ -262,7 +262,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
int count=3000;
int lower=(count-1)*distance + (distance/3) +startOffset;
NumericRangeQuery<Integer> q=NumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, true);
TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
@ -272,7 +272,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue());
q=NumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, false);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
@ -550,7 +550,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
Filter tf=NumericRangeFilter.newFloatRange(field, precisionStep,
NumericUtils.sortableIntToFloat(lower), NumericUtils.sortableIntToFloat(upper), true, true);
tTopDocs = searcher.search(new MatchAllDocsQuery(), tf, 1);
tTopDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), tf), 1);
assertEquals("Returned count of range filter must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
}
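Case 2 of the switch above now routes the NumericRangeFilter through a FilteredQuery over MatchAllDocsQuery rather than a Filter argument. A short hedged sketch of the equivalence the test leans on, with illustrative names (not part of the commit):

    import java.io.IOException;

    import org.apache.lucene.search.Filter;
    import org.apache.lucene.search.FilteredQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.NumericRangeFilter;
    import org.apache.lucene.search.NumericRangeQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.TopDocs;

    final class NumericRangeEquivalenceSketch {
      private NumericRangeEquivalenceSketch() {}

      // The query form and the filter form of the same int range should match the
      // same documents once the filter is wrapped in a FilteredQuery.
      static void checkSameHitCount(IndexSearcher searcher, String field, int precisionStep,
                                    Integer lower, Integer upper, int noDocs) throws IOException {
        Query q = NumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
        Filter f = NumericRangeFilter.newIntRange(field, precisionStep, lower, upper, true, true);
        TopDocs byQuery = searcher.search(q, noDocs, Sort.INDEXORDER);
        TopDocs byFilter = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), noDocs, Sort.INDEXORDER);
        if (byQuery.totalHits != byFilter.totalHits) {
          throw new AssertionError("query and filter disagree: " + byQuery.totalHits + " vs " + byFilter.totalHits);
        }
      }
    }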

View File

@ -167,16 +167,16 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
case 0:
type = " (constant score filter rewrite)";
q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
break;
case 1:
type = " (constant score boolean rewrite)";
q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
break;
case 2:
type = " (filter)";
topDocs = searcher.search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), noDocs, Sort.INDEXORDER);
break;
default:
return;
@ -239,7 +239,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
int count=3000;
long upper=(count-1)*distance + (distance/3) + startOffset;
NumericRangeQuery<Long> q=NumericRangeQuery.newLongRange(field, precisionStep, null, upper, true, true);
TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
@ -249,7 +249,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
q=NumericRangeQuery.newLongRange(field, precisionStep, null, upper, false, true);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", count, sd.length );
@ -284,7 +284,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
int count=3000;
long lower=(count-1)*distance + (distance/3) +startOffset;
NumericRangeQuery<Long> q=NumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, true);
TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
@ -294,7 +294,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() );
q=NumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, false);
topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER);
topDocs = searcher.search(q, noDocs, Sort.INDEXORDER);
sd = topDocs.scoreDocs;
assertNotNull(sd);
assertEquals("Score doc count", noDocs-count, sd.length );
@ -587,7 +587,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
Filter tf=NumericRangeFilter.newDoubleRange(field, precisionStep,
NumericUtils.sortableLongToDouble(lower), NumericUtils.sortableLongToDouble(upper), true, true);
tTopDocs = searcher.search(new MatchAllDocsQuery(), tf, 1);
tTopDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), tf), 1);
assertEquals("Returned count of range filter must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits );
}

View File

@ -88,10 +88,10 @@ public class TestPhrasePrefixQuery extends LuceneTestCase {
query2.add(termsWithPrefix.toArray(new Term[0]));
ScoreDoc[] result;
result = searcher.search(query1, null, 1000).scoreDocs;
result = searcher.search(query1, 1000).scoreDocs;
assertEquals(2, result.length);
result = searcher.search(query2, null, 1000).scoreDocs;
result = searcher.search(query2, 1000).scoreDocs;
assertEquals(0, result.length);
reader.close();
indexStore.close();

View File

@ -113,7 +113,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(2);
query.add(new Term("field", "one"));
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -122,7 +122,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(3);
query.add(new Term("field", "one"));
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -134,7 +134,7 @@ public class TestPhraseQuery extends LuceneTestCase {
// slop is zero by default
query.add(new Term("field", "four"));
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("exact match", 1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -142,7 +142,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query = new PhraseQuery();
query.add(new Term("field", "two"));
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("reverse not exact", 0, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -152,7 +152,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(1);
query.add(new Term("field", "one"));
query.add(new Term("field", "two"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("in order", 1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -163,7 +163,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(1);
query.add(new Term("field", "two"));
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("reversed, slop not 2 or more", 0, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -175,7 +175,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(2); // must be at least two for reverse order match
query.add(new Term("field", "two"));
query.add(new Term("field", "one"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -184,7 +184,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(2);
query.add(new Term("field", "three"));
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("not sloppy enough", 0, hits.length);
QueryUtils.check(random(), query,searcher);
@ -199,7 +199,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
query.add(new Term("field", "three"));
query.add(new Term("field", "five"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("two total moves", 1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -209,13 +209,13 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "five"));
query.add(new Term("field", "three"));
query.add(new Term("field", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("slop of 5 not close enough", 0, hits.length);
QueryUtils.check(random(), query,searcher);
query.setSlop(6);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("slop of 6 just right", 1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -238,7 +238,7 @@ public class TestPhraseQuery extends LuceneTestCase {
PhraseQuery query = new PhraseQuery();
query.add(new Term("field","stop"));
query.add(new Term("field","words"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(random(), query,searcher);
@ -267,7 +267,7 @@ public class TestPhraseQuery extends LuceneTestCase {
PhraseQuery phraseQuery = new PhraseQuery();
phraseQuery.add(new Term("source", "marketing"));
phraseQuery.add(new Term("source", "info"));
ScoreDoc[] hits = searcher.search(phraseQuery, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(phraseQuery, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(random(), phraseQuery,searcher);
@ -276,7 +276,7 @@ public class TestPhraseQuery extends LuceneTestCase {
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(termQuery, BooleanClause.Occur.MUST);
booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST);
hits = searcher.search(booleanQuery, null, 1000).scoreDocs;
hits = searcher.search(booleanQuery, 1000).scoreDocs;
assertEquals(1, hits.length);
QueryUtils.check(random(), termQuery,searcher);
@ -307,22 +307,22 @@ public class TestPhraseQuery extends LuceneTestCase {
phraseQuery.add(new Term("contents","map"));
phraseQuery.add(new Term("contents","entry"));
hits = searcher.search(termQuery, null, 1000).scoreDocs;
hits = searcher.search(termQuery, 1000).scoreDocs;
assertEquals(3, hits.length);
hits = searcher.search(phraseQuery, null, 1000).scoreDocs;
hits = searcher.search(phraseQuery, 1000).scoreDocs;
assertEquals(2, hits.length);
booleanQuery = new BooleanQuery();
booleanQuery.add(termQuery, BooleanClause.Occur.MUST);
booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST);
hits = searcher.search(booleanQuery, null, 1000).scoreDocs;
hits = searcher.search(booleanQuery, 1000).scoreDocs;
assertEquals(2, hits.length);
booleanQuery = new BooleanQuery();
booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST);
booleanQuery.add(termQuery, BooleanClause.Occur.MUST);
hits = searcher.search(booleanQuery, null, 1000).scoreDocs;
hits = searcher.search(booleanQuery, 1000).scoreDocs;
assertEquals(2, hits.length);
QueryUtils.check(random(), booleanQuery,searcher);
@ -359,7 +359,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "firstname"));
query.add(new Term("field", "lastname"));
query.setSlop(Integer.MAX_VALUE);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(3, hits.length);
// Make sure that those matches where the terms appear closer to
// each other get a higher score:
@ -407,13 +407,13 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("repeated", "part"));
query.setSlop(100);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("slop of 100 just right", 1, hits.length);
QueryUtils.check(random(), query,searcher);
query.setSlop(99);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("slop of 99 not enough", 0, hits.length);
QueryUtils.check(random(), query,searcher);
}
@ -426,7 +426,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("nonexist", "found"));
query.setSlop(2); // would be found this way
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("phrase without repetitions exists in 2 docs", 2, hits.length);
QueryUtils.check(random(), query,searcher);
@ -437,7 +437,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("nonexist", "exist"));
query.setSlop(1); // would be found
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("phrase with repetitions exists in two docs", 2, hits.length);
QueryUtils.check(random(), query,searcher);
@ -448,7 +448,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("nonexist", "phrase"));
query.setSlop(1000); // would not be found no matter how high the slop is
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length);
QueryUtils.check(random(), query,searcher);
@ -460,7 +460,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("nonexist", "exist"));
query.setSlop(1000); // would not be found no matter how high the slop is
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length);
QueryUtils.check(random(), query,searcher);
@ -481,7 +481,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(0); // to use exact phrase scorer
query.add(new Term("field", "two"));
query.add(new Term("field", "three"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("phrase found with exact phrase scorer", 1, hits.length);
float score0 = hits[0].score;
//System.out.println("(exact) field: two three: "+score0);
@ -489,7 +489,7 @@ public class TestPhraseQuery extends LuceneTestCase {
// search on non-palindrome, find phrase with slop 2, though no slop required here.
query.setSlop(2); // to use sloppy scorer
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
float score1 = hits[0].score;
//System.out.println("(sloppy) field: two three: "+score1);
@ -501,7 +501,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(2); // must be at least two for both ordered and reversed to match
query.add(new Term("palindrome", "two"));
query.add(new Term("palindrome", "three"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
//float score2 = hits[0].score;
//System.out.println("palindrome: two three: "+score2);
@ -515,7 +515,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.setSlop(2); // must be at least two for both ordered and reversed to match
query.add(new Term("palindrome", "three"));
query.add(new Term("palindrome", "two"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
//float score3 = hits[0].score;
//System.out.println("palindrome: three two: "+score3);
@ -542,7 +542,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("field", "one"));
query.add(new Term("field", "two"));
query.add(new Term("field", "three"));
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("phrase found with exact phrase scorer", 1, hits.length);
float score0 = hits[0].score;
//System.out.println("(exact) field: one two three: "+score0);
@ -553,7 +553,7 @@ public class TestPhraseQuery extends LuceneTestCase {
// search on non-palindrome, find phrase with slop 3, though no slop required here.
query.setSlop(4); // to use sloppy scorer
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
float score1 = hits[0].score;
//System.out.println("(sloppy) field: one two three: "+score1);
@ -566,7 +566,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("palindrome", "one"));
query.add(new Term("palindrome", "two"));
query.add(new Term("palindrome", "three"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
// just make sure no exc:
searcher.explain(query, 0);
@ -585,7 +585,7 @@ public class TestPhraseQuery extends LuceneTestCase {
query.add(new Term("palindrome", "three"));
query.add(new Term("palindrome", "two"));
query.add(new Term("palindrome", "one"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("just sloppy enough", 1, hits.length);
//float score3 = hits[0].score;
//System.out.println("palindrome: three two one: "+score3);

View File

@ -122,40 +122,40 @@ public class TestPositionIncrement extends LuceneTestCase {
q = new PhraseQuery();
q.add(new Term("field", "1"));
q.add(new Term("field", "2"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
// same as previous, just specify positions explicitly.
q = new PhraseQuery();
q.add(new Term("field", "1"),0);
q.add(new Term("field", "2"),1);
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
// specifying correct positions should find the phrase.
q = new PhraseQuery();
q.add(new Term("field", "1"),0);
q.add(new Term("field", "2"),2);
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "2"));
q.add(new Term("field", "3"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "3"));
q.add(new Term("field", "4"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
// phrase query would find it when correct positions are specified.
q = new PhraseQuery();
q.add(new Term("field", "3"),0);
q.add(new Term("field", "4"),0);
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
// phrase query should fail for non existing searched term
@ -163,38 +163,38 @@ public class TestPositionIncrement extends LuceneTestCase {
q = new PhraseQuery();
q.add(new Term("field", "3"),0);
q.add(new Term("field", "9"),0);
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
// multi-phrase query should succeed for non-existing searched term
// because another searched term exists in the same searched position.
MultiPhraseQuery mq = new MultiPhraseQuery();
mq.add(new Term[]{new Term("field", "3"),new Term("field", "9")},0);
hits = searcher.search(mq, null, 1000).scoreDocs;
hits = searcher.search(mq, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "2"));
q.add(new Term("field", "4"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "3"));
q.add(new Term("field", "5"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "4"));
q.add(new Term("field", "5"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
q = new PhraseQuery();
q.add(new Term("field", "2"));
q.add(new Term("field", "5"));
hits = searcher.search(q, null, 1000).scoreDocs;
hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
reader.close();

View File

@ -49,55 +49,55 @@ public class TestPrefixFilter extends LuceneTestCase {
PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers"));
Query query = new ConstantScoreQuery(filter);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals(4, hits.length);
// test middle of values
filter = new PrefixFilter(new Term("category", "/Computers/Mac"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(2, hits.length);
// test start of values
filter = new PrefixFilter(new Term("category", "/Computers/Linux"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
// test end of values
filter = new PrefixFilter(new Term("category", "/Computers/Windows"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(1, hits.length);
// test non-existent
filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// test non-existent, before values
filter = new PrefixFilter(new Term("category", "/Computers/AAA"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// test non-existent, after values
filter = new PrefixFilter(new Term("category", "/Computers/ZZZ"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
// test zero length prefix
filter = new PrefixFilter(new Term("category", ""));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(4, hits.length);
// test non existent field
filter = new PrefixFilter(new Term("nonexistantfield", "/Computers"));
query = new ConstantScoreQuery(filter);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals(0, hits.length);
writer.close();

View File

@ -85,12 +85,12 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
public void testPrefixQuery() throws Exception {
Query query = new PrefixQuery(new Term(FIELD, "tang"));
assertEquals("Number of matched documents", 2,
searcher.search(query, null, 1000).totalHits);
searcher.search(query, 1000).totalHits);
}
public void testTermQuery() throws Exception {
Query query = new TermQuery(new Term(FIELD, "tangfulin"));
assertEquals("Number of matched documents", 2,
searcher.search(query, null, 1000).totalHits);
searcher.search(query, 1000).totalHits);
}
public void testTermBooleanQuery() throws Exception {
BooleanQuery query = new BooleanQuery();
@ -99,7 +99,7 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
query.add(new TermQuery(new Term(FIELD, "notexistnames")),
BooleanClause.Occur.SHOULD);
assertEquals("Number of matched documents", 2,
searcher.search(query, null, 1000).totalHits);
searcher.search(query, 1000).totalHits);
}
public void testPrefixBooleanQuery() throws Exception {
@ -109,6 +109,6 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase {
query.add(new TermQuery(new Term(FIELD, "notexistnames")),
BooleanClause.Occur.SHOULD);
assertEquals("Number of matched documents", 2,
searcher.search(query, null, 1000).totalHits);
searcher.search(query, 1000).totalHits);
}
}

View File

@ -48,17 +48,17 @@ public class TestPrefixQuery extends LuceneTestCase {
PrefixQuery query = new PrefixQuery(new Term("category", "/Computers"));
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("All documents in /Computers category and below", 3, hits.length);
query = new PrefixQuery(new Term("category", "/Computers/Mac"));
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("One in /Computers/Mac", 1, hits.length);
query = new PrefixQuery(new Term("category", ""));
Terms terms = MultiFields.getTerms(searcher.getIndexReader(), "category");
assertFalse(query.getTermsEnum(terms) instanceof PrefixTermsEnum);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("everything", 3, hits.length);
writer.close();
reader.close();

View File

@ -46,9 +46,9 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery);
IndexSearcher searcher = newSearcher(reader);
TopDocs hits = searcher.search(new MatchAllDocsQuery(), qwf, 10);
TopDocs hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
assertEquals(1, hits.totalHits);
// should not throw exception with complex primitive query
@ -58,26 +58,26 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
Occur.MUST_NOT);
qwf = new QueryWrapperFilter(termQuery);
hits = searcher.search(new MatchAllDocsQuery(), qwf, 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
assertEquals(1, hits.totalHits);
// should not throw exception with non primitive Query (doesn't implement
// Query#createWeight)
qwf = new QueryWrapperFilter(new FuzzyQuery(new Term("field", "valu")));
hits = searcher.search(new MatchAllDocsQuery(), qwf, 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, hits.totalHits);
hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
assertEquals(1, hits.totalHits);
// test a query with no hits
termQuery = new TermQuery(new Term("field", "not_exist"));
qwf = new QueryWrapperFilter(termQuery);
hits = searcher.search(new MatchAllDocsQuery(), qwf, 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(0, hits.totalHits);
hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10);
hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10);
assertEquals(0, hits.totalHits);
reader.close();
dir.close();
@ -113,8 +113,8 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
final IndexReader r = w.getReader();
w.close();
final TopDocs hits = newSearcher(r).search(new MatchAllDocsQuery(),
new QueryWrapperFilter(new TermQuery(new Term("field", "a"))),
final TopDocs hits = newSearcher(r).search(new FilteredQuery(new MatchAllDocsQuery(),
new QueryWrapperFilter(new TermQuery(new Term("field", "a")))),
numDocs);
assertEquals(aDocs.size(), hits.totalHits);
for(ScoreDoc sd: hits.scoreDocs) {
@ -141,7 +141,7 @@ public class TestQueryWrapperFilter extends LuceneTestCase {
for (int i = 0; i < 1000; i++) {
TermQuery termQuery = new TermQuery(new Term("field", English.intToEnglish(i)));
QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery);
TopDocs td = searcher.search(new MatchAllDocsQuery(), qwf, 10);
TopDocs td = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10);
assertEquals(1, td.totalHits);
}

View File

@ -180,11 +180,8 @@ public class TestSearchAfter extends LuceneTestCase {
// pages.
int n = atLeast(20);
for (int i = 0; i < n; i++) {
Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd")));
assertQuery(new MatchAllDocsQuery(), null);
assertQuery(new TermQuery(new Term("english", "one")), null);
assertQuery(new MatchAllDocsQuery(), odd);
assertQuery(new TermQuery(new Term("english", "four")), odd);
BooleanQuery bq = new BooleanQuery();
bq.add(new TermQuery(new Term("english", "one")), BooleanClause.Occur.SHOULD);
bq.add(new TermQuery(new Term("oddeven", "even")), BooleanClause.Occur.SHOULD);
@ -192,15 +189,15 @@ public class TestSearchAfter extends LuceneTestCase {
}
}
void assertQuery(Query query, Filter filter) throws Exception {
assertQuery(query, filter, null);
assertQuery(query, filter, Sort.RELEVANCE);
assertQuery(query, filter, Sort.INDEXORDER);
void assertQuery(Query query) throws Exception {
assertQuery(query, null);
assertQuery(query, Sort.RELEVANCE);
assertQuery(query, Sort.INDEXORDER);
for(SortField sortField : allSortFields) {
assertQuery(query, filter, new Sort(new SortField[] {sortField}));
assertQuery(query, new Sort(new SortField[] {sortField}));
}
for(int i=0;i<20;i++) {
assertQuery(query, filter, getRandomSort());
assertQuery(query, getRandomSort());
}
}
@ -212,21 +209,21 @@ public class TestSearchAfter extends LuceneTestCase {
return new Sort(sortFields);
}
void assertQuery(Query query, Filter filter, Sort sort) throws Exception {
void assertQuery(Query query, Sort sort) throws Exception {
int maxDoc = searcher.getIndexReader().maxDoc();
TopDocs all;
int pageSize = TestUtil.nextInt(random(), 1, maxDoc * 2);
if (VERBOSE) {
System.out.println("\nassertQuery " + (iter++) + ": query=" + query + " filter=" + filter + " sort=" + sort + " pageSize=" + pageSize);
System.out.println("\nassertQuery " + (iter++) + ": query=" + query + " sort=" + sort + " pageSize=" + pageSize);
}
final boolean doMaxScore = random().nextBoolean();
final boolean doScores = random().nextBoolean();
if (sort == null) {
all = searcher.search(query, filter, maxDoc);
all = searcher.search(query, maxDoc);
} else if (sort == Sort.RELEVANCE) {
all = searcher.search(query, filter, maxDoc, sort, true, doMaxScore);
all = searcher.search(query, maxDoc, sort, true, doMaxScore);
} else {
all = searcher.search(query, filter, maxDoc, sort, doScores, doMaxScore);
all = searcher.search(query, maxDoc, sort, doScores, doMaxScore);
}
if (VERBOSE) {
System.out.println(" all.totalHits=" + all.totalHits);
@ -243,15 +240,15 @@ public class TestSearchAfter extends LuceneTestCase {
if (VERBOSE) {
System.out.println(" iter lastBottom=" + lastBottom);
}
paged = searcher.searchAfter(lastBottom, query, filter, pageSize);
paged = searcher.searchAfter(lastBottom, query, pageSize);
} else {
if (VERBOSE) {
System.out.println(" iter lastBottom=" + lastBottom);
}
if (sort == Sort.RELEVANCE) {
paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort, true, doMaxScore);
paged = searcher.searchAfter(lastBottom, query, pageSize, sort, true, doMaxScore);
} else {
paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort, doScores, doMaxScore);
paged = searcher.searchAfter(lastBottom, query, pageSize, sort, doScores, doMaxScore);
}
}
if (VERBOSE) {

View File

@ -148,7 +148,7 @@ public class TestSortRandom extends LuceneTestCase {
}
final int hitCount = TestUtil.nextInt(random, 1, r.maxDoc() + 20);
final RandomFilter f = new RandomFilter(random, random.nextFloat(), docValues);
int queryType = random.nextInt(3);
int queryType = random.nextInt(2);
if (queryType == 0) {
// force out of order
BooleanQuery bq = new BooleanQuery();
@ -158,13 +158,10 @@ public class TestSortRandom extends LuceneTestCase {
// Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
// the clause instead of BQ.
bq.setMinimumNumberShouldMatch(1);
hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
} else if (queryType == 1) {
hits = s.search(new ConstantScoreQuery(f),
null, hitCount, sort, random.nextBoolean(), random.nextBoolean());
hits = s.search(new FilteredQuery(bq, f), hitCount, sort, random.nextBoolean(), random.nextBoolean());
} else {
hits = s.search(new MatchAllDocsQuery(),
f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
hits = s.search(new ConstantScoreQuery(f),
hitCount, sort, random.nextBoolean(), random.nextBoolean());
}
if (VERBOSE) {

View File

@ -37,13 +37,13 @@ public class TestSortedNumericSortField extends LuceneTestCase {
Sort sort = new Sort();
sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG));
TopDocs td = empty.search(query, null, 10, sort, true, true);
TopDocs td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
// for an empty index, any selector should work
for (SortedNumericSelector.Type v : SortedNumericSelector.Type.values()) {
sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG, false, v));
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
}
}

View File

@ -37,13 +37,13 @@ public class TestSortedSetSortField extends LuceneTestCase {
Sort sort = new Sort();
sort.setSort(new SortedSetSortField("sortedset", false));
TopDocs td = empty.search(query, null, 10, sort, true, true);
TopDocs td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
// for an empty index, any selector should work
for (SortedSetSelector.Type v : SortedSetSelector.Type.values()) {
sort.setSort(new SortedSetSortField("sortedset", false, v));
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
}
}

View File

@ -124,7 +124,7 @@ public class TestSubScorerFreqs extends LuceneTestCase {
public void testTermQuery() throws Exception {
TermQuery q = new TermQuery(new Term("f", "d"));
CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10));
s.search(q, null, c);
s.search(q, c);
final int maxDocs = s.getIndexReader().maxDoc();
assertEquals(maxDocs, c.docCounts.size());
for (int i = 0; i < maxDocs; i++) {
@ -164,7 +164,7 @@ public class TestSubScorerFreqs extends LuceneTestCase {
for (final Set<String> occur : occurList) {
CountingCollector c = new CountingCollector(TopScoreDocCollector.create(
10), occur);
s.search(query, null, c);
s.search(query, c);
final int maxDocs = s.getIndexReader().maxDoc();
assertEquals(maxDocs, c.docCounts.size());
boolean includeOptional = occur.contains("SHOULD");
@ -196,7 +196,7 @@ public class TestSubScorerFreqs extends LuceneTestCase {
q.add(new Term("f", "b"));
q.add(new Term("f", "c"));
CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10));
s.search(q, null, c);
s.search(q, c);
final int maxDocs = s.getIndexReader().maxDoc();
assertEquals(maxDocs, c.docCounts.size());
for (int i = 0; i < maxDocs; i++) {

View File

@ -54,83 +54,83 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
// test id, bounded on both ends
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, T)),
numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, F)),
numDocs).scoreDocs;
assertEquals("all but last", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, T)),
numDocs).scoreDocs;
assertEquals("all but first", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, F)),
numDocs).scoreDocs;
assertEquals("all but ends", numDocs - 2, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, T)),
numDocs).scoreDocs;
assertEquals("med and up", 1 + maxId - medId, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, medIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, medIP, T, T)),
numDocs).scoreDocs;
assertEquals("up to med", 1 + medId - minId, result.length);
// unbounded id
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, null, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, null, T, F)),
numDocs).scoreDocs;
assertEquals("min and up", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", null, maxIP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, maxIP, F, T)),
numDocs).scoreDocs;
assertEquals("max and down", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, null, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, null, F, F)),
numDocs).scoreDocs;
assertEquals("not min, but up", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", null, maxIP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, maxIP, F, F)),
numDocs).scoreDocs;
assertEquals("not max, but down", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, F)),
numDocs).scoreDocs;
assertEquals("med and up, not max", maxId - medId, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, medIP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, medIP, F, T)),
numDocs).scoreDocs;
assertEquals("not min, up to med", medId - minId, result.length);
// very small sets
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, minIP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, minIP, F, F)),
numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", medIP, medIP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, medIP, F, F)),
numDocs).scoreDocs;
assertEquals("med,med,F,F", 0, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, F, F)),
numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", minIP, minIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, minIP, T, T)),
numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", null, minIP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, minIP, F, T)),
numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, T, T)),
numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", maxIP, null, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, null, T, F)),
numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("id", medIP, medIP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, medIP, T, T)),
numDocs).scoreDocs;
assertEquals("med,med,T,T", 1, result.length);
}
@ -153,60 +153,60 @@ public class TestTermRangeFilter extends BaseTestRangeFilter {
// test extremes, bounded on both ends
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, T)),
numDocs).scoreDocs;
assertEquals("find all", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, F)),
numDocs).scoreDocs;
assertEquals("all but biggest", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, T)),
numDocs).scoreDocs;
assertEquals("all but smallest", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, F)),
numDocs).scoreDocs;
assertEquals("all but extremes", numDocs - 2, result.length);
// unbounded
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, null, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, null, T, F)),
numDocs).scoreDocs;
assertEquals("smallest and up", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, T)),
numDocs).scoreDocs;
assertEquals("biggest and down", numDocs, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, null, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, null, F, F)),
numDocs).scoreDocs;
assertEquals("not smallest, but up", numDocs - 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, F)),
numDocs).scoreDocs;
assertEquals("not biggest, but down", numDocs - 1, result.length);
// very small sets
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, minRP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, minRP, F, F)),
numDocs).scoreDocs;
assertEquals("min,min,F,F", 0, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, F, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, F, F)),
numDocs).scoreDocs;
assertEquals("max,max,F,F", 0, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, minRP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, minRP, T, T)),
numDocs).scoreDocs;
assertEquals("min,min,T,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", null, minRP, F, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, minRP, F, T)),
numDocs).scoreDocs;
assertEquals("nul,min,F,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, T, T),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, T, T)),
numDocs).scoreDocs;
assertEquals("max,max,T,T", 1, result.length);
result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, null, T, F),
result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, null, T, F)),
numDocs).scoreDocs;
assertEquals("max,nul,T,T", 1, result.length);
}

View File

@ -56,21 +56,21 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[] {"A", "B", "C", "D"});
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("A,B,C,D, only B in range", 1, hits.length);
reader.close();
initializeIndex(new String[] {"A", "B", "D"});
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("A,B,D, only B in range", 1, hits.length);
reader.close();
addDoc("C");
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("C added, still only B in range", 1, hits.length);
reader.close();
}
@ -81,21 +81,21 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[]{"A", "B", "C", "D"});
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
assertEquals("A,B,C,D - A,B,C in range", 3, hits.length);
reader.close();
initializeIndex(new String[]{"A", "B", "D"});
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("A,B,D - A and B in range", 2, hits.length);
reader.close();
addDoc("C");
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
hits = searcher.search(query, null, 1000).scoreDocs;
hits = searcher.search(query, 1000).scoreDocs;
assertEquals("C added - A, B, C in range", 3, hits.length);
reader.close();
}
@ -107,17 +107,17 @@ public class TestTermRangeQuery extends LuceneTestCase {
TermRangeQuery query = new TermRangeQuery("content", null, null, true, true);
Terms terms = MultiFields.getTerms(searcher.getIndexReader(), "content");
assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum);
assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length);
assertEquals(4, searcher.search(query, 1000).scoreDocs.length);
query = new TermRangeQuery("content", null, null, false, false);
assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum);
assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length);
assertEquals(4, searcher.search(query, 1000).scoreDocs.length);
query = TermRangeQuery.newStringRange("content", "", null, true, false);
assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum);
assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length);
assertEquals(4, searcher.search(query, 1000).scoreDocs.length);
// and now another one
query = TermRangeQuery.newStringRange("content", "B", null, true, false);
assertTrue(query.getTermsEnum(terms) instanceof TermRangeTermsEnum);
assertEquals(3, searcher.search(query, null, 1000).scoreDocs.length);
assertEquals(3, searcher.search(query, 1000).scoreDocs.length);
reader.close();
}
@ -276,7 +276,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[] {"A", "B", "", "C", "D"}, analyzer);
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
int numHits = searcher.search(query, null, 1000).totalHits;
int numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("A,B,<empty string>,C,D => A, B & <empty string> are in range", 3, numHits);
// until Lucene-38 is fixed, use this assert:
@ -286,7 +286,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[] {"A", "B", "", "D"}, analyzer);
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
numHits = searcher.search(query, null, 1000).totalHits;
numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("A,B,<empty string>,D => A, B & <empty string> are in range", 3, numHits);
// until Lucene-38 is fixed, use this assert:
@ -295,7 +295,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
addDoc("C");
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
numHits = searcher.search(query, null, 1000).totalHits;
numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("C added, still A, B & <empty string> are in range", 3, numHits);
// until Lucene-38 is fixed, use this assert
@ -311,7 +311,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[]{"A", "B", "","C", "D"}, analyzer);
IndexReader reader = DirectoryReader.open(dir);
IndexSearcher searcher = newSearcher(reader);
int numHits = searcher.search(query, null, 1000).totalHits;
int numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("A,B,<empty string>,C,D => A,B,<empty string>,C in range", 4, numHits);
// until Lucene-38 is fixed, use this assert
@ -320,7 +320,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
initializeIndex(new String[]{"A", "B", "", "D"}, analyzer);
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
numHits = searcher.search(query, null, 1000).totalHits;
numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("A,B,<empty string>,D - A, B and <empty string> in range", 3, numHits);
// until Lucene-38 is fixed, use this assert
@ -329,7 +329,7 @@ public class TestTermRangeQuery extends LuceneTestCase {
addDoc("C");
reader = DirectoryReader.open(dir);
searcher = newSearcher(reader);
numHits = searcher.search(query, null, 1000).totalHits;
numHits = searcher.search(query, 1000).totalHits;
// When Lucene-38 is fixed, use the assert on the next line:
assertEquals("C added => A,B,<empty string>,C in range", 4, numHits);
// until Lucene-38 is fixed, use this assert

View File

@ -105,7 +105,7 @@ public class TestTimeLimitingCollector extends LuceneTestCase {
query = booleanQuery;
// warm the searcher
searcher.search(query, null, 1000);
searcher.search(query, 1000);
}
@Override

View File

@ -41,7 +41,7 @@ public class TestTotalHitCountCollector extends LuceneTestCase {
IndexSearcher searcher = newSearcher(reader);
TotalHitCountCollector c = new TotalHitCountCollector();
searcher.search(new MatchAllDocsQuery(), null, c);
searcher.search(new MatchAllDocsQuery(), c);
assertEquals(5, c.getTotalHits());
reader.close();
indexStore.close();

View File

@ -254,7 +254,7 @@ public class TestWildcard
private void assertMatches(IndexSearcher searcher, Query q, int expectedMatches)
throws IOException {
ScoreDoc[] result = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] result = searcher.search(q, 1000).scoreDocs;
assertEquals(expectedMatches, result.length);
}
@ -354,14 +354,14 @@ public class TestWildcard
// test queries that must find all
for (Query q : matchAll) {
if (VERBOSE) System.out.println("matchAll: q=" + q + " " + q.getClass().getName());
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals(docs.length, hits.length);
}
// test queries that must find none
for (Query q : matchNone) {
if (VERBOSE) System.out.println("matchNone: q=" + q + " " + q.getClass().getName());
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals(0, hits.length);
}
@ -370,7 +370,7 @@ public class TestWildcard
for (int j = 0; j < matchOneDocPrefix[i].length; j++) {
Query q = matchOneDocPrefix[i][j];
if (VERBOSE) System.out.println("match 1 prefix: doc="+docs[i]+" q="+q+" "+q.getClass().getName());
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1,hits.length);
assertEquals(i,hits[0].doc);
}
@ -381,7 +381,7 @@ public class TestWildcard
for (int j = 0; j < matchOneDocWild[i].length; j++) {
Query q = matchOneDocWild[i][j];
if (VERBOSE) System.out.println("match 1 wild: doc="+docs[i]+" q="+q+" "+q.getClass().getName());
ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
assertEquals(1,hits.length);
assertEquals(i,hits[0].doc);
}

View File

@ -141,7 +141,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
// all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
// and all the similarity factors are set to 1
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("should be 10 hits", hits.totalHits == 10);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -155,7 +155,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
}
// all should have score = 3 because adjacent terms have payloads of 2,4
// and all the similarity factors are set to 1
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertEquals("should be 100 hits", 100, hits.totalHits);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -179,7 +179,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
clauses[1] = q2;
query = new PayloadNearQuery(clauses, 10, false);
//System.out.println(query.toString());
assertEquals(12, searcher.search(query, null, 100).totalHits);
assertEquals(12, searcher.search(query, 100).totalHits);
/*
System.out.println(hits.totalHits);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -197,7 +197,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
QueryUtils.check(query);
// all 10 hits should have score = 3 because adjacent terms have payloads of 2,4
// and all the similarity factors are set to 1
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("should be 10 hits", hits.totalHits == 10);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -216,7 +216,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
query = newPhraseQuery("field", "twenty two", true, new MaxPayloadFunction());
QueryUtils.check(query);
// all 10 hits should have score = 4 (max payload value)
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("should be 10 hits", hits.totalHits == 10);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -235,7 +235,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
query = newPhraseQuery("field", "twenty two", true, new MinPayloadFunction());
QueryUtils.check(query);
// all 10 hits should have score = 2 (min payload value)
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("should be 10 hits", hits.totalHits == 10);
for (int j = 0; j < hits.scoreDocs.length; j++) {
@ -269,7 +269,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
PayloadNearQuery query;
TopDocs hits;
query = newPhraseQuery("field", "nine hundred ninety nine", true, new AveragePayloadFunction());
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
ScoreDoc doc = hits.scoreDocs[0];
// System.out.println("Doc: " + doc.toString());
@ -291,7 +291,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
SpanQuery q4 = newPhraseQuery("field", "hundred nine", false, new AveragePayloadFunction());
SpanQuery[]clauses = new SpanQuery[] {new PayloadNearQuery(new SpanQuery[] {q1,q2}, 0, true), new PayloadNearQuery(new SpanQuery[] {q3,q4}, 0, false)};
query = new PayloadNearQuery(clauses, 0, false);
hits = searcher.search(query, null, 100);
hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
// should be only 1 hit - doc 999
assertTrue("should only be one hit", hits.scoreDocs.length == 1);

View File

@ -147,7 +147,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
public void test() throws IOException {
PayloadTermQuery query = new PayloadTermQuery(new Term("field", "seventy"),
new MaxPayloadFunction());
TopDocs hits = searcher.search(query, null, 100);
TopDocs hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100);
@ -188,7 +188,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
public void testMultipleMatchesPerDoc() throws Exception {
PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"),
new MaxPayloadFunction());
TopDocs hits = searcher.search(query, null, 100);
TopDocs hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100);
@ -230,7 +230,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
IndexReader reader = DirectoryReader.open(directory);
IndexSearcher theSearcher = newSearcher(reader);
theSearcher.setSimilarity(new FullSimilarity());
TopDocs hits = searcher.search(query, null, 100);
TopDocs hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100);
@ -267,7 +267,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
public void testNoMatch() throws Exception {
PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.FIELD, "junk"),
new MaxPayloadFunction());
TopDocs hits = searcher.search(query, null, 100);
TopDocs hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("hits Size: " + hits.totalHits + " is not: " + 0, hits.totalHits == 0);
@ -283,7 +283,7 @@ public class TestPayloadTermQuery extends LuceneTestCase {
BooleanQuery query = new BooleanQuery();
query.add(c1);
query.add(c2);
TopDocs hits = searcher.search(query, null, 100);
TopDocs hits = searcher.search(query, 100);
assertTrue("hits is null and it shouldn't be", hits != null);
assertTrue("hits Size: " + hits.totalHits + " is not: " + 1, hits.totalHits == 1);
int[] results = new int[1];

View File

@ -136,7 +136,7 @@ public class TestSpansAdvanced extends LuceneTestCase {
// Hits hits = searcher.search(query);
// hits normalizes and throws things off if one score is greater than 1.0
TopDocs topdocs = s.search(query, null, 10000);
TopDocs topdocs = s.search(query, 10000);
/*****
* // display the hits System.out.println(hits.length() +

View File

@ -237,7 +237,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
writer.deleteDocuments(new Term("id", "0"));
reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = newSearcher(reader);
ScoreDoc[] hits = searcher.search(new TermQuery(bbb), null, 1000).scoreDocs;
ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
dir.tweakBufferSizes();
assertEquals(36, hits.length);
@ -248,14 +248,14 @@ public class TestBufferedIndexInput extends LuceneTestCase {
reader = DirectoryReader.open(writer, true);
searcher = newSearcher(reader);
hits = searcher.search(new TermQuery(bbb), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs;
dir.tweakBufferSizes();
assertEquals(35, hits.length);
dir.tweakBufferSizes();
hits = searcher.search(new TermQuery(new Term("id", "33")), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(new Term("id", "33")), 1000).scoreDocs;
dir.tweakBufferSizes();
assertEquals(1, hits.length);
hits = searcher.search(new TermQuery(aaa), null, 1000).scoreDocs;
hits = searcher.search(new TermQuery(aaa), 1000).scoreDocs;
dir.tweakBufferSizes();
assertEquals(35, hits.length);
writer.close();

View File

@ -280,7 +280,7 @@ public class TestLockFactory extends LuceneTestCase {
break;
}
try {
searcher.search(query, null, 1000);
searcher.search(query, 1000);
} catch (IOException e) {
hitException = true;
System.out.println("Stress Test Index Searcher: search hit unexpected exception: " + e.toString());

View File

@ -120,7 +120,7 @@ public class SearchFiles {
if (repeat > 0) { // repeat & time as benchmark
Date start = new Date();
for (int i = 0; i < repeat; i++) {
searcher.search(query, null, 100);
searcher.search(query, 100);
}
Date end = new Date();
System.out.println("Time: "+(end.getTime()-start.getTime())+"ms");

View File

@ -131,7 +131,7 @@ public class SimpleFacetsExample {
// MatchAllDocsQuery is for "browsing" (counts facets
// for all non-deleted docs in the index); normally
// you'd use a "normal" query:
searcher.search(new MatchAllDocsQuery(), null /*Filter */, fc);
searcher.search(new MatchAllDocsQuery(), fc);
// Retrieve results
List<FacetResult> results = new ArrayList<>();
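For context on the collector-based call above, a minimal hedged sketch of the "browsing" pattern that comment describes; the FacetsCollector type of fc is an assumption here, since its declaration sits outside this hunk, and the commented-out term query is purely illustrative:

    FacetsCollector fc = new FacetsCollector();     // assumed collector type; it gathers matching docs for facet counting
    searcher.search(new MatchAllDocsQuery(), fc);   // "browsing": facet counts over all non-deleted docs
    // To count facets only for documents matching a narrower query instead (field and term are made up):
    // searcher.search(new TermQuery(new Term("Author", "Lisa")), fc);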

View File

@ -105,7 +105,7 @@ public class TestDemoExpressions extends LuceneTestCase {
// create a sort field and sort by it (reverse order)
Sort sort = new Sort(expr.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
searcher.search(query, null, 3, sort);
searcher.search(query, 3, sort);
}
/** tests the returned sort values are correct */
@ -117,7 +117,7 @@ public class TestDemoExpressions extends LuceneTestCase {
Sort sort = new Sort(expr.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
TopFieldDocs td = searcher.search(query, 3, sort, true, true);
for (int i = 0; i < 3; i++) {
FieldDoc d = (FieldDoc) td.scoreDocs[i];
float expected = (float) Math.sqrt(d.score);
@ -135,7 +135,7 @@ public class TestDemoExpressions extends LuceneTestCase {
Sort sort = new Sort(expr.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
TopFieldDocs td = searcher.search(query, 3, sort, true, true);
for (int i = 0; i < 3; i++) {
FieldDoc d = (FieldDoc) td.scoreDocs[i];
float expected = 2*d.score;
@ -154,7 +154,7 @@ public class TestDemoExpressions extends LuceneTestCase {
Sort sort = new Sort(expr.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
TopFieldDocs td = searcher.search(query, 3, sort, true, true);
for (int i = 0; i < 3; i++) {
FieldDoc d = (FieldDoc) td.scoreDocs[i];
float expected = 2*d.score;
@ -174,7 +174,7 @@ public class TestDemoExpressions extends LuceneTestCase {
Sort sort = new Sort(expr2.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
TopFieldDocs td = searcher.search(query, 3, sort, true, true);
for (int i = 0; i < 3; i++) {
FieldDoc d = (FieldDoc) td.scoreDocs[i];
float expected = 2*d.score;
@ -206,7 +206,7 @@ public class TestDemoExpressions extends LuceneTestCase {
Expression expr = JavascriptCompiler.compile(sb.toString());
Sort sort = new Sort(expr.getSortField(bindings, true));
Query query = new TermQuery(new Term("body", "contents"));
TopFieldDocs td = searcher.search(query, null, 3, sort, true, true);
TopFieldDocs td = searcher.search(query, 3, sort, true, true);
for (int i = 0; i < 3; i++) {
FieldDoc d = (FieldDoc) td.scoreDocs[i];
float expected = n*d.score;
@ -221,7 +221,7 @@ public class TestDemoExpressions extends LuceneTestCase {
bindings.add(new SortField("latitude", SortField.Type.DOUBLE));
bindings.add(new SortField("longitude", SortField.Type.DOUBLE));
Sort sort = new Sort(distance.getSortField(bindings, false));
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort);
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort);
FieldDoc d = (FieldDoc) td.scoreDocs[0];
assertEquals(0.4619D, (Double)d.fields[0], 1E-4);
@ -238,7 +238,7 @@ public class TestDemoExpressions extends LuceneTestCase {
SimpleBindings bindings = new SimpleBindings();
bindings.add("doc['popularity'].value", new IntFieldSource("popularity"));
Sort sort = new Sort(popularity.getSortField(bindings, true));
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort);
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort);
FieldDoc d = (FieldDoc)td.scoreDocs[0];
assertEquals(20D, (Double)d.fields[0], 1E-4);
@ -288,7 +288,7 @@ public class TestDemoExpressions extends LuceneTestCase {
}
};
Sort sort = new Sort(popularity.getSortField(bindings, false));
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort);
TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort);
FieldDoc d = (FieldDoc)td.scoreDocs[0];
assertEquals(2092D, (Double)d.fields[0], 1E-4);

View File

@ -86,23 +86,20 @@ public class TestExpressionSorts extends LuceneTestCase {
public void testQueries() throws Exception {
int n = atLeast(4);
for (int i = 0; i < n; i++) {
Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd")));
assertQuery(new MatchAllDocsQuery(), null);
assertQuery(new TermQuery(new Term("english", "one")), null);
assertQuery(new MatchAllDocsQuery(), odd);
assertQuery(new TermQuery(new Term("english", "four")), odd);
assertQuery(new MatchAllDocsQuery());
assertQuery(new TermQuery(new Term("english", "one")));
BooleanQuery bq = new BooleanQuery();
bq.add(new TermQuery(new Term("english", "one")), BooleanClause.Occur.SHOULD);
bq.add(new TermQuery(new Term("oddeven", "even")), BooleanClause.Occur.SHOULD);
assertQuery(bq, null);
assertQuery(bq);
// force in order
bq.add(new TermQuery(new Term("english", "two")), BooleanClause.Occur.SHOULD);
bq.setMinimumNumberShouldMatch(2);
assertQuery(bq, null);
assertQuery(bq);
}
}
void assertQuery(Query query, Filter filter) throws Exception {
void assertQuery(Query query) throws Exception {
for (int i = 0; i < 10; i++) {
boolean reversed = random().nextBoolean();
SortField fields[] = new SortField[] {
@ -114,13 +111,13 @@ public class TestExpressionSorts extends LuceneTestCase {
};
Collections.shuffle(Arrays.asList(fields), random());
int numSorts = TestUtil.nextInt(random(), 1, fields.length);
assertQuery(query, filter, new Sort(Arrays.copyOfRange(fields, 0, numSorts)));
assertQuery(query, new Sort(Arrays.copyOfRange(fields, 0, numSorts)));
}
}
void assertQuery(Query query, Filter filter, Sort sort) throws Exception {
void assertQuery(Query query, Sort sort) throws Exception {
int size = TestUtil.nextInt(random(), 1, searcher.getIndexReader().maxDoc() / 5);
TopDocs expected = searcher.search(query, filter, size, sort, random().nextBoolean(), random().nextBoolean());
TopDocs expected = searcher.search(query, size, sort, random().nextBoolean(), random().nextBoolean());
// make our actual sort, mutating original by replacing some of the
// sortfields with equivalent expressions
@ -141,12 +138,12 @@ public class TestExpressionSorts extends LuceneTestCase {
}
Sort mutatedSort = new Sort(mutated);
TopDocs actual = searcher.search(query, filter, size, mutatedSort, random().nextBoolean(), random().nextBoolean());
TopDocs actual = searcher.search(query, size, mutatedSort, random().nextBoolean(), random().nextBoolean());
CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);
if (size < actual.totalHits) {
expected = searcher.searchAfter(expected.scoreDocs[size-1], query, filter, size, sort);
actual = searcher.searchAfter(actual.scoreDocs[size-1], query, filter, size, mutatedSort);
expected = searcher.searchAfter(expected.scoreDocs[size-1], query, size, sort);
actual = searcher.searchAfter(actual.scoreDocs[size-1], query, size, mutatedSort);
CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs);
}
}

View File

@ -46,6 +46,7 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
@ -752,7 +753,11 @@ public class TestDrillSideways extends FacetTestCase {
verifyEquals(dimValues, s, expected, actual, scores, doUseDV);
// Make sure drill down doesn't change score:
TopDocs ddqHits = s.search(ddq, filter, numDocs);
Query q = ddq;
if (filter != null) {
q = new FilteredQuery(q, filter);
}
TopDocs ddqHits = s.search(q, numDocs);
assertEquals(expected.hits.size(), ddqHits.totalHits);
for(int i=0;i<expected.hits.size();i++) {
// Score should be IDENTICAL:

View File

@ -124,34 +124,19 @@ public class GroupingSearch {
* @return the grouped result as a {@link TopGroups} instance
* @throws IOException If any I/O related errors occur
*/
public <T> TopGroups<T> search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
return search(searcher, null, query, groupOffset, groupLimit);
}
/**
* Executes a grouped search. Both the first pass and second pass are executed on the specified searcher.
*
* @param searcher The {@link org.apache.lucene.search.IndexSearcher} instance to execute the grouped search on.
* @param filter The filter to execute with the grouping
* @param query The query to execute with the grouping
* @param groupOffset The group offset
* @param groupLimit The number of groups to return from the specified group offset
* @return the grouped result as a {@link TopGroups} instance
* @throws IOException If any I/O related errors occur
*/
@SuppressWarnings("unchecked")
public <T> TopGroups<T> search(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException {
public <T> TopGroups<T> search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
if (groupField != null || groupFunction != null) {
return groupByFieldOrFunction(searcher, filter, query, groupOffset, groupLimit);
return groupByFieldOrFunction(searcher, query, groupOffset, groupLimit);
} else if (groupEndDocs != null) {
return (TopGroups<T>) groupByDocBlock(searcher, filter, query, groupOffset, groupLimit);
return (TopGroups<T>) groupByDocBlock(searcher, query, groupOffset, groupLimit);
} else {
throw new IllegalStateException("Either groupField, groupFunction or groupEndDocs must be set."); // This can't happen...
}
}
@SuppressWarnings({"unchecked", "rawtypes"})
protected TopGroups groupByFieldOrFunction(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException {
protected TopGroups groupByFieldOrFunction(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
int topN = groupOffset + groupLimit;
final AbstractFirstPassGroupingCollector firstPassCollector;
final AbstractAllGroupsCollector allGroupsCollector;
@ -204,9 +189,9 @@ public class GroupingSearch {
} else {
cachedCollector = CachingCollector.create(firstRound, cacheScores, maxDocsToCache);
}
searcher.search(query, filter, cachedCollector);
searcher.search(query, cachedCollector);
} else {
searcher.search(query, filter, firstRound);
searcher.search(query, firstRound);
}
if (allGroups) {
@ -236,7 +221,7 @@ public class GroupingSearch {
if (cachedCollector != null && cachedCollector.isCached()) {
cachedCollector.replay(secondPassCollector);
} else {
searcher.search(query, filter, secondPassCollector);
searcher.search(query, secondPassCollector);
}
if (allGroups) {
@ -246,10 +231,10 @@ public class GroupingSearch {
}
}
protected TopGroups<?> groupByDocBlock(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException {
protected TopGroups<?> groupByDocBlock(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException {
int topN = groupOffset + groupLimit;
BlockGroupingCollector c = new BlockGroupingCollector(groupSort, topN, includeScores, groupEndDocs);
searcher.search(query, filter, c);
searcher.search(query, c);
int topNInsideGroup = groupDocsOffset + groupDocsLimit;
return c.getTopGroups(sortWithinGroup, groupOffset, groupDocsOffset, topNInsideGroup, fillSortFields);
}
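For reference, a minimal sketch of driving the remaining search(IndexSearcher, Query, int, int) overload documented above; the index searcher, the "group" grouping field, and the query term are illustrative assumptions borrowed from GroupingSearchTest below, not additional API:

    import java.io.IOException;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.grouping.GroupingSearch;
    import org.apache.lucene.search.grouping.TopGroups;

    // Group the matches of a term query by the "group" field and return the first 10 groups.
    static TopGroups<?> firstGroups(IndexSearcher indexSearcher) throws IOException {
      GroupingSearch groupingSearch = new GroupingSearch("group"); // group on a single indexed field
      groupingSearch.setAllGroups(true);                           // also track every distinct group that matched
      // groupOffset = 0, groupLimit = 10
      return groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10);
    }

The returned TopGroups exposes totalHitCount and totalGroupedHitCount, which the updated GroupingSearchTest asserts against.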

View File

@ -123,7 +123,7 @@ public class GroupingSearchTest extends LuceneTestCase {
Sort groupSort = Sort.RELEVANCE;
GroupingSearch groupingSearch = createRandomGroupingSearch(groupField, groupSort, 5, canUseIDV);
TopGroups<?> groups = groupingSearch.search(indexSearcher, null, new TermQuery(new Term("content", "random")), 0, 10);
TopGroups<?> groups = groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10);
assertEquals(7, groups.totalHitCount);
assertEquals(7, groups.totalGroupedHitCount);
@ -161,7 +161,7 @@ public class GroupingSearchTest extends LuceneTestCase {
Filter lastDocInBlock = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupend", "x"))));
groupingSearch = new GroupingSearch(lastDocInBlock);
groups = groupingSearch.search(indexSearcher, null, new TermQuery(new Term("content", "random")), 0, 10);
groups = groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10);
assertEquals(7, groups.totalHitCount);
assertEquals(7, groups.totalGroupedHitCount);
@ -237,7 +237,7 @@ public class GroupingSearchTest extends LuceneTestCase {
GroupingSearch gs = new GroupingSearch("group");
gs.setAllGroups(true);
TopGroups<?> groups = gs.search(indexSearcher, null, new TermQuery(new Term("group", "foo")), 0, 10);
TopGroups<?> groups = gs.search(indexSearcher, new TermQuery(new Term("group", "foo")), 0, 10);
assertEquals(1, groups.totalHitCount);
//assertEquals(1, groups.totalGroupCount.intValue());
assertEquals(1, groups.totalGroupedHitCount);

View File

@ -1014,7 +1014,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// it rewrites to ConstantScoreQuery which cannot be highlighted
// query = unReWrittenQuery.rewrite(reader);
if (VERBOSE) System.out.println("Searching for: " + query.toString(FIELD_NAME));
hits = searcher.search(query, null, 1000);
hits = searcher.search(query, 1000);
for (int i = 0; i < hits.totalHits; i++) {
final StoredDocument doc = searcher.doc(hits.scoreDocs[i].doc);
@ -1036,7 +1036,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// try null field
hits = searcher.search(query, null, 1000);
hits = searcher.search(query, 1000);
numHighlights = 0;
@ -1061,7 +1061,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// try default field
hits = searcher.search(query, null, 1000);
hits = searcher.search(query, 1000);
numHighlights = 0;
@ -1541,7 +1541,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
if (VERBOSE) System.out.println("Searching with primitive query");
// forget to set this and...
// query=query.rewrite(reader);
TopDocs hits = searcher.search(query, null, 1000);
TopDocs hits = searcher.search(query, 1000);
// create an instance of the highlighter with the tags used to surround
// highlighted text
@ -1913,7 +1913,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
//Scorer scorer = new QueryTermScorer( query, "t_text1" );
Highlighter h = new Highlighter( scorer );
TopDocs hits = searcher.search(query, null, 10);
TopDocs hits = searcher.search(query, 10);
for( int i = 0; i < hits.totalHits; i++ ){
StoredDocument doc = searcher.doc( hits.scoreDocs[i].doc );
String result = h.getBestFragment( a, "t_text1", doc.get( "t_text1" ));
@ -1944,7 +1944,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
scorer.setUsePayloads(true);
Highlighter h = new Highlighter(scorer);
TopDocs hits = searcher.search(query, null, 10);
TopDocs hits = searcher.search(query, 10);
assertEquals(1, hits.scoreDocs.length);
TokenStream stream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), 0, FIELD_NAME, analyzer);
if (random().nextBoolean()) {
@ -1996,7 +1996,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// you must use a rewritten query!
query = unReWrittenQuery.rewrite(reader);
if (VERBOSE) System.out.println("Searching for: " + query.toString(FIELD_NAME));
hits = searcher.search(query, null, 1000);
hits = searcher.search(query, 1000);
}
public void assertExpectedHighlightCount(final int maxNumFragmentsRequired,

View File

@ -92,7 +92,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = new WildcardQuery(new Term("body", "te*"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -103,7 +103,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(new WildcardQuery(new Term("bogus", "te*")), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -144,7 +144,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = new PrefixQuery(new Term("body", "te"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -155,7 +155,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(new PrefixQuery(new Term("bogus", "te")), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -196,7 +196,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = new RegexpQuery(new Term("body", "te.*"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -207,7 +207,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(new RegexpQuery(new Term("bogus", "te.*")), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -248,7 +248,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = new FuzzyQuery(new Term("body", "tets"), 1);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -257,7 +257,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
// with prefix
query = new FuzzyQuery(new Term("body", "tets"), 1, 2);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -268,7 +268,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(new FuzzyQuery(new Term("bogus", "tets"), 1), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -309,7 +309,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = TermRangeQuery.newStringRange("body", "ta", "tf", true, true);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -318,7 +318,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
// null start
query = TermRangeQuery.newStringRange("body", null, "tf", true, true);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -327,7 +327,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
// null end
query = TermRangeQuery.newStringRange("body", "ta", null, true, true);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -336,7 +336,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
// exact start inclusive
query = TermRangeQuery.newStringRange("body", "test", "tf", true, true);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -345,7 +345,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
// exact end inclusive
query = TermRangeQuery.newStringRange("body", "ta", "test", true, true);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -356,7 +356,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
BooleanQuery bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(TermRangeQuery.newStringRange("body", "test", "tf", false, true), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -367,7 +367,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(TermRangeQuery.newStringRange("body", "ta", "test", true, false), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -378,7 +378,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
bq = new BooleanQuery();
bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
bq.add(TermRangeQuery.newStringRange("bogus", "ta", "tf", true, true), BooleanClause.Occur.SHOULD);
topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(bq, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", bq, searcher, topDocs);
assertEquals(2, snippets.length);
@ -420,7 +420,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
};
BooleanQuery query = new BooleanQuery();
query.add(new WildcardQuery(new Term("body", "te*")), BooleanClause.Occur.SHOULD);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -431,7 +431,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
query = new BooleanQuery();
query.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD);
query.add(new WildcardQuery(new Term("bogus", "te*")), BooleanClause.Occur.MUST_NOT);
topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
snippets = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -474,7 +474,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
FilteredQuery query = new FilteredQuery(
new WildcardQuery(new Term("body", "te*")),
new QueryWrapperFilter(new TermQuery(new Term("body", "test"))));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -515,7 +515,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
ConstantScoreQuery query = new ConstantScoreQuery(new WildcardQuery(new Term("body", "te*")));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -557,7 +557,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
};
DisjunctionMaxQuery query = new DisjunctionMaxQuery(0);
query.add(new WildcardQuery(new Term("body", "te*")));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -598,7 +598,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
}
};
Query query = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -640,7 +640,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
};
SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
Query query = new SpanOrQuery(new SpanQuery[] { childQuery });
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -682,7 +682,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
};
SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
Query query = new SpanNearQuery(new SpanQuery[] { childQuery }, 0, true);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -725,7 +725,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
SpanQuery include = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
SpanQuery exclude = new SpanTermQuery(new Term("body", "bogus"));
Query query = new SpanNotQuery(include, exclude);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -767,7 +767,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
};
SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*")));
Query query = new SpanFirstQuery(childQuery, 1000000);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -811,7 +811,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase {
query.add(new WildcardQuery(new Term("body", "te*")), BooleanClause.Occur.SHOULD);
query.add(new WildcardQuery(new Term("body", "one")), BooleanClause.Occur.SHOULD);
query.add(new WildcardQuery(new Term("body", "se*")), BooleanClause.Occur.SHOULD);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(1, snippets.length);
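
The hunks above all apply the same mechanical rewrite: each removed IndexSearcher overload was being called with a null Filter, so the argument is simply dropped. A minimal sketch of that pattern, assuming Lucene 5.x-era classes; the class and method names below are illustrative and not part of this patch.

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

class NullFilterMigrationSketch {
  // Illustrative helper: "searcher" is supplied by the caller, as in the tests above.
  static TopDocs topTen(IndexSearcher searcher) throws IOException {
    Query query = new TermQuery(new Term("body", "test"));
    // Before LUCENE-6286 (removed overload): searcher.search(query, null, 10, Sort.INDEXORDER);
    // After: the null Filter argument is simply dropped.
    return searcher.search(query, 10, Sort.INDEXORDER);
  }
}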

View File

@ -76,7 +76,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -145,7 +145,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
PostingsHighlighter highlighter = new PostingsHighlighter(maxLength);
@ -179,7 +179,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(1, snippets.length);
@ -214,7 +214,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -251,7 +251,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter(40);
Query query = new TermQuery(new Term("body", "field"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(1, snippets.length);
@ -291,7 +291,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
BooleanQuery query = new BooleanQuery();
query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("title", "best")), BooleanClause.Occur.SHOULD);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
Map<String,String[]> snippets = highlighter.highlightFields(new String [] { "body", "title" }, query, searcher, topDocs);
assertEquals(2, snippets.size());
@ -329,7 +329,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("body", "just")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("body", "first")), BooleanClause.Occur.SHOULD);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(2, snippets.length);
@ -363,7 +363,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
assertEquals(2, snippets.length);
@ -401,7 +401,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
try {
highlighter.highlight("body", query, searcher, topDocs, 2);
@ -539,7 +539,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
assertEquals(1, snippets.length);
@ -603,7 +603,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
}
};
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
assertEquals(1, snippets.length);
@ -636,7 +636,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
IndexSearcher searcher = newSearcher(ir);
PostingsHighlighter highlighter = new PostingsHighlighter();
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(2, topDocs.totalHits);
ScoreDoc[] hits = topDocs.scoreDocs;
int[] docIDs = new int[2];
@ -688,7 +688,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
};
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2);
assertEquals(1, snippets.length);
@ -1015,7 +1015,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
}
};
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(1, snippets.length);
@ -1059,7 +1059,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
}
};
Query query = new TermQuery(new Term("body", "field"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs);
assertEquals(1, snippets.length);
@ -1106,7 +1106,7 @@ public class TestPostingsHighlighter extends LuceneTestCase {
};
Query query = new TermQuery(new Term("body", "highlighting"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
int[] docIDs = new int[1];
docIDs[0] = topDocs.scoreDocs[0].doc;

View File

@ -272,7 +272,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase {
}
};
Query query = new TermQuery(new Term("body", "test"));
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 1);
assertEquals(1, snippets.length);
@ -313,7 +313,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase {
BooleanQuery query = new BooleanQuery();
query.add(new TermQuery(new Term("body", "foo")), BooleanClause.Occur.SHOULD);
query.add(new TermQuery(new Term("body", "bar")), BooleanClause.Occur.SHOULD);
TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertEquals(1, topDocs.totalHits);
String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 1);
assertEquals(1, snippets.length);

View File

@ -51,6 +51,7 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MultiTermQuery;
@ -246,8 +247,8 @@ public class TestBlockJoin extends LuceneTestCase {
assertEquals("Lisa", getParentDoc(r, parentsFilter, hits.scoreDocs[0].doc).get("name"));
// Test with filter on child docs:
assertEquals(0, s.search(fullChildQuery,
new QueryWrapperFilter(new TermQuery(new Term("skill", "foosball"))),
assertEquals(0, s.search(new FilteredQuery(fullChildQuery,
new QueryWrapperFilter(new TermQuery(new Term("skill", "foosball")))),
1).totalHits);
r.close();
@ -354,20 +355,20 @@ public class TestBlockJoin extends LuceneTestCase {
assertEquals("no filter - both passed", 2, s.search(childJoinQuery, 10).totalHits);
assertEquals("dummy filter passes everyone ", 2, s.search(childJoinQuery, parentsFilter, 10).totalHits);
assertEquals("dummy filter passes everyone ", 2, s.search(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("docType", "resume"))), 10).totalHits);
assertEquals("dummy filter passes everyone ", 2, s.search(new FilteredQuery(childJoinQuery, parentsFilter), 10).totalHits);
assertEquals("dummy filter passes everyone ", 2, s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("docType", "resume")))), 10).totalHits);
// not found test
assertEquals("noone live there", 0, s.search(childJoinQuery, new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("country", "Oz")))), 1).totalHits);
assertEquals("noone live there", 0, s.search(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("country", "Oz"))), 1).totalHits);
assertEquals("noone live there", 0, s.search(new FilteredQuery(childJoinQuery, new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("country", "Oz"))))), 1).totalHits);
assertEquals("noone live there", 0, s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("country", "Oz")))), 1).totalHits);
// apply the UK filter by the searcher
TopDocs ukOnly = s.search(childJoinQuery, new QueryWrapperFilter(parentQuery), 1);
TopDocs ukOnly = s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(parentQuery)), 1);
assertEquals("has filter - single passed", 1, ukOnly.totalHits);
assertEquals( "Lisa", r.document(ukOnly.scoreDocs[0].doc).get("name"));
// looking for US candidates
TopDocs usThen = s.search(childJoinQuery , new QueryWrapperFilter(new TermQuery(new Term("country", "United States"))), 1);
TopDocs usThen = s.search(new FilteredQuery(childJoinQuery , new QueryWrapperFilter(new TermQuery(new Term("country", "United States")))), 1);
assertEquals("has filter - single passed", 1, usThen.totalHits);
assertEquals("Frank", r.document(usThen.scoreDocs[0].doc).get("name"));
@ -377,14 +378,14 @@ public class TestBlockJoin extends LuceneTestCase {
s.search(new ToChildBlockJoinQuery(us,
parentsFilter), 10).totalHits );
assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter),
skill("java"), 10).totalHits );
assertEquals("java skills in US", 1, s.search(new FilteredQuery(new ToChildBlockJoinQuery(us, parentsFilter),
skill("java")), 10).totalHits );
BooleanQuery rubyPython = new BooleanQuery();
rubyPython.add(new TermQuery(new Term("skill", "ruby")), Occur.SHOULD);
rubyPython.add(new TermQuery(new Term("skill", "python")), Occur.SHOULD);
assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter),
new QueryWrapperFilter(rubyPython), 10).totalHits );
assertEquals("ruby skills in US", 1, s.search(new FilteredQuery(new ToChildBlockJoinQuery(us, parentsFilter),
new QueryWrapperFilter(rubyPython)), 10).totalHits );
r.close();
dir.close();
@ -786,7 +787,7 @@ public class TestBlockJoin extends LuceneTestCase {
sortFields.addAll(Arrays.asList(childSort.getSort()));
final Sort parentAndChildSort = new Sort(sortFields.toArray(new SortField[sortFields.size()]));
final TopDocs results = s.search(parentQuery, null, r.numDocs(),
final TopDocs results = s.search(parentQuery, r.numDocs(),
parentAndChildSort);
if (VERBOSE) {
@ -922,30 +923,24 @@ public class TestBlockJoin extends LuceneTestCase {
final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter);
// To run against the block-join index:
final Query childJoinQuery2;
Query childJoinQuery2;
// Same query as parentJoinQuery, but to run against
// the fully denormalized index (so we can compare
// results):
final Query childQuery2;
// apply a filter to children
final Filter childFilter2, childJoinFilter2;
Query childQuery2;
if (random().nextBoolean()) {
childQuery2 = parentQuery2;
childJoinQuery2 = parentJoinQuery2;
childFilter2 = null;
childJoinFilter2 = null;
} else {
final Term childTerm = randomChildTerm(childFields[0]);
if (random().nextBoolean()) { // filtered case
childJoinQuery2 = parentJoinQuery2;
final Filter f = new QueryWrapperFilter(new TermQuery(childTerm));
childJoinFilter2 = random().nextBoolean()
? new BitDocIdSetCachingWrapperFilter(f): f;
childJoinQuery2 = new FilteredQuery(childJoinQuery2, random().nextBoolean()
? new BitDocIdSetCachingWrapperFilter(f): f);
} else {
childJoinFilter2 = null;
// AND child field w/ parent query:
final BooleanQuery bq = new BooleanQuery();
childJoinQuery2 = bq;
@ -963,10 +958,9 @@ public class TestBlockJoin extends LuceneTestCase {
if (random().nextBoolean()) { // filtered case
childQuery2 = parentQuery2;
final Filter f = new QueryWrapperFilter(new TermQuery(childTerm));
childFilter2 = random().nextBoolean()
? new BitDocIdSetCachingWrapperFilter(f): f;
childQuery2 = new FilteredQuery(childQuery2, random().nextBoolean()
? new BitDocIdSetCachingWrapperFilter(f): f);
} else {
childFilter2 = null;
final BooleanQuery bq2 = new BooleanQuery();
childQuery2 = bq2;
if (random().nextBoolean()) {
@ -985,11 +979,9 @@ public class TestBlockJoin extends LuceneTestCase {
// Search denormalized index:
if (VERBOSE) {
System.out.println("TEST: run top down query=" + childQuery2 +
" filter=" + childFilter2 +
" sort=" + childSort2);
System.out.println("TEST: run top down query=" + childQuery2 + " sort=" + childSort2);
}
final TopDocs results2 = s.search(childQuery2, childFilter2, r.numDocs(),
final TopDocs results2 = s.search(childQuery2, r.numDocs(),
childSort2);
if (VERBOSE) {
System.out.println(" " + results2.totalHits + " totalHits:");
@ -1001,10 +993,9 @@ public class TestBlockJoin extends LuceneTestCase {
// Search join index:
if (VERBOSE) {
System.out.println("TEST: run top down join query=" + childJoinQuery2 +
" filter=" + childJoinFilter2 + " sort=" + childSort2);
System.out.println("TEST: run top down join query=" + childJoinQuery2 + " sort=" + childSort2);
}
TopDocs joinResults2 = joinS.search(childJoinQuery2, childJoinFilter2, joinR.numDocs(), childSort2);
TopDocs joinResults2 = joinS.search(childJoinQuery2, joinR.numDocs(), childSort2);
if (VERBOSE) {
System.out.println(" " + joinResults2.totalHits + " totalHits:");
for(ScoreDoc sd : joinResults2.scoreDocs) {
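
Where a call passed a real Filter rather than null (as in the TestBlockJoin hunks above), the patch instead folds the filter into the query with FilteredQuery. A sketch of that second pattern under the same assumptions; the class and method names are again illustrative, while the field values echo the test data above.

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

class FilterMigrationSketch {
  // Illustrative helper: the join query and searcher come from the caller, as in TestBlockJoin.
  static TopDocs filtered(IndexSearcher searcher, Query childJoinQuery) throws IOException {
    Filter skillFilter = new QueryWrapperFilter(new TermQuery(new Term("skill", "java")));
    // Before LUCENE-6286: searcher.search(childJoinQuery, skillFilter, 10);
    // After: the Filter is wrapped together with the query in a FilteredQuery.
    return searcher.search(new FilteredQuery(childJoinQuery, skillFilter), 10);
  }
}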

View File

@ -31,6 +31,7 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
@ -121,7 +122,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("common_field", "1")));
thrown.expect(IllegalStateException.class);
thrown.expectMessage(ToChildBlockJoinQuery.ILLEGAL_ADVANCE_ON_PARENT);
indexSearcher.search(blockJoinQuery, childFilter, 1);
indexSearcher.search(new FilteredQuery(blockJoinQuery, childFilter), 1);
}
@Test

View File

@ -1006,7 +1006,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
Collections.singletonMap("f", Type.SORTED));
w.close();
IndexSearcher s = newSearcher(r);
TopDocs hits = s.search(new TermQuery(new Term("t", "1")), null, 10, new Sort(new SortField("f", SortField.Type.STRING)));
TopDocs hits = s.search(new TermQuery(new Term("t", "1")), 10, new Sort(new SortField("f", SortField.Type.STRING)));
assertEquals(2, hits.totalHits);
// null sorts first
assertEquals(1, hits.scoreDocs[0].doc);
@ -1035,7 +1035,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
Collections.singletonMap("string", Type.SORTED));
IndexSearcher searcher = new IndexSearcher(reader);
try {
searcher.search(new MatchAllDocsQuery(), null, 500, sort);
searcher.search(new MatchAllDocsQuery(), 500, sort);
fail("didn't get expected exception");
} catch (IllegalStateException expected) {}
reader.close();
@ -1069,10 +1069,10 @@ public class TestFieldCacheSort extends LuceneTestCase {
Query q = new TermQuery(new Term("body", "text"));
IndexSearcher s = newSearcher(r);
float maxScore = s.search(q , 10).getMaxScore();
assertEquals(maxScore, s.search(q, null, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0);
assertEquals(maxScore, s.search(q, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0);
TestUtil.checkReader(r);
r.close();
d.close();
@ -1084,27 +1084,27 @@ public class TestFieldCacheSort extends LuceneTestCase {
Query query = new TermQuery(new Term("contents", "foo"));
Sort sort = new Sort();
TopDocs td = empty.search(query, null, 10, sort, true, true);
TopDocs td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
sort.setSort(SortField.FIELD_DOC);
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
sort.setSort(new SortField("int", SortField.Type.INT), SortField.FIELD_DOC);
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
sort.setSort(new SortField("string", SortField.Type.STRING, true), SortField.FIELD_DOC);
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
sort.setSort(new SortField("string_val", SortField.Type.STRING_VAL, true), SortField.FIELD_DOC);
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
sort.setSort(new SortField("float", SortField.Type.FLOAT), new SortField("string", SortField.Type.STRING));
td = empty.search(query, null, 10, sort, true, true);
td = empty.search(query, 10, sort, true, true);
assertEquals(0, td.totalHits);
}
@ -1146,7 +1146,7 @@ public class TestFieldCacheSort extends LuceneTestCase {
TopDocs expected = searcher.search(new TermQuery(new Term("value", "foo")), 10);
assertEquals(1, expected.totalHits);
TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), null, 10, sort, true, true);
TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), 10, sort, true, true);
assertEquals(expected.totalHits, actual.totalHits);
assertEquals(expected.scoreDocs[0].score, actual.scoreDocs[0].score, 0F);

View File

@ -43,6 +43,7 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Sort;
@ -173,13 +174,13 @@ public class TestFieldCacheSortRandom extends LuceneTestCase {
// Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return
// the clause instead of BQ.
bq.setMinimumNumberShouldMatch(1);
hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
hits = s.search(new FilteredQuery(bq, f), hitCount, sort, random.nextBoolean(), random.nextBoolean());
} else if (queryType == 1) {
hits = s.search(new ConstantScoreQuery(f),
null, hitCount, sort, random.nextBoolean(), random.nextBoolean());
hitCount, sort, random.nextBoolean(), random.nextBoolean());
} else {
hits = s.search(new MatchAllDocsQuery(),
f, hitCount, sort, random.nextBoolean(), random.nextBoolean());
hits = s.search(new FilteredQuery(new MatchAllDocsQuery(),
f), hitCount, sort, random.nextBoolean(), random.nextBoolean());
}
if (VERBOSE) {

View File

@ -127,7 +127,7 @@ public class TestNumericTerms32 extends LuceneTestCase {
int a=lower; lower=upper; upper=a;
}
Query tq=NumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true);
TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.Type.INT, true)));
TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.INT, true)));
if (topDocs.totalHits==0) continue;
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);

View File

@ -132,7 +132,7 @@ public class TestNumericTerms64 extends LuceneTestCase {
long a=lower; lower=upper; upper=a;
}
Query tq=NumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true);
TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true)));
TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true)));
if (topDocs.totalHits==0) continue;
ScoreDoc[] sd = topDocs.scoreDocs;
assertNotNull(sd);

View File

@ -263,11 +263,11 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
log(q5CustomMulAdd);
// do al the searches
TopDocs td1 = s.search(q1, null, 1000);
TopDocs td2CustomNeutral = s.search(q2CustomNeutral, null, 1000);
TopDocs td3CustomMul = s.search(q3CustomMul, null, 1000);
TopDocs td4CustomAdd = s.search(q4CustomAdd, null, 1000);
TopDocs td5CustomMulAdd = s.search(q5CustomMulAdd, null, 1000);
TopDocs td1 = s.search(q1, 1000);
TopDocs td2CustomNeutral = s.search(q2CustomNeutral, 1000);
TopDocs td3CustomMul = s.search(q3CustomMul, 1000);
TopDocs td4CustomAdd = s.search(q4CustomAdd, 1000);
TopDocs td5CustomMulAdd = s.search(q5CustomMulAdd, 1000);
// put results in map so we can verify the scores although they have changed
Map<Integer,Float> h1 = topDocsToMap(td1);

View File

@ -64,7 +64,7 @@ public class TestFieldScoreQuery extends FunctionTestSetup {
IndexSearcher s = newSearcher(r);
log("test: "+ functionQuery);
QueryUtils.check(random(), functionQuery,s);
ScoreDoc[] h = s.search(functionQuery, null, 1000).scoreDocs;
ScoreDoc[] h = s.search(functionQuery, 1000).scoreDocs;
assertEquals("All docs should be matched!",N_DOCS,h.length);
String prevID = "ID"+(N_DOCS+1); // greater than all ids of docs in this test
for (int i=0; i<h.length; i++) {
@ -95,7 +95,7 @@ public class TestFieldScoreQuery extends FunctionTestSetup {
FunctionQuery functionQuery = new FunctionQuery(valueSource);
IndexReader r = DirectoryReader.open(dir);
IndexSearcher s = newSearcher(r);
TopDocs td = s.search(functionQuery,null,1000);
TopDocs td = s.search(functionQuery,1000);
assertEquals("All docs should be matched!",N_DOCS,td.totalHits);
ScoreDoc sd[] = td.scoreDocs;
for (ScoreDoc aSd : sd) {

View File

@ -569,7 +569,7 @@ public class TestValueSources extends LuceneTestCase {
expectedDocs[i] = i;
expected[i] = new ScoreDoc(i, scores[i]);
}
TopDocs docs = searcher.search(q, null, documents.size(),
TopDocs docs = searcher.search(q, documents.size(),
new Sort(new SortField("id", SortField.Type.STRING)), true, false);
CheckHits.checkHits(random(), q, "", searcher, expectedDocs);
CheckHits.checkHitsQuery(q, expected, docs.scoreDocs, expectedDocs);

View File

@ -298,7 +298,7 @@ public class TestMultiFieldQueryParser extends LuceneTestCase {
Query q = mfqp.parse("the footest");
IndexReader ir = DirectoryReader.open(ramDir);
IndexSearcher is = newSearcher(ir);
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = is.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
ir.close();
ramDir.close();

View File

@ -334,7 +334,7 @@ public class TestMultiFieldQPHelper extends LuceneTestCase {
Query q = mfqp.parse("the footest", null);
IndexReader ir = DirectoryReader.open(ramDir);
IndexSearcher is = newSearcher(ir);
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = is.search(q, 1000).scoreDocs;
assertEquals(1, hits.length);
ir.close();
ramDir.close();

View File

@ -1257,7 +1257,7 @@ public class TestQPHelper extends LuceneTestCase {
qp.setLocale(Locale.ENGLISH);
Query q = qp.parse(query, "date");
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = is.search(q, 1000).scoreDocs;
assertEquals(expected, hits.length);
}

View File

@ -1072,7 +1072,7 @@ public abstract class QueryParserTestBase extends LuceneTestCase {
CommonQueryParserConfiguration qp = getParserConfig( new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
qp.setLocale(Locale.ENGLISH);
Query q = getQuery(query,qp);
ScoreDoc[] hits = is.search(q, null, 1000).scoreDocs;
ScoreDoc[] hits = is.search(q, 1000).scoreDocs;
assertEquals(expected, hits.length);
setDefaultField( oldDefaultField );
}

View File

@ -133,7 +133,7 @@ public class TestParser extends LuceneTestCase {
public void testCustomFieldUserQueryXML() throws ParserException, IOException {
Query q = parse("UserInputQueryCustomField.xml");
int h = searcher.search(q, null, 1000).totalHits;
int h = searcher.search(q, 1000).totalHits;
assertEquals("UserInputQueryCustomField should produce 0 result ", 0, h);
}
@ -190,7 +190,7 @@ public class TestParser extends LuceneTestCase {
List<LeafReaderContext> leaves = searcher.getTopReaderContext().leaves();
Assume.assumeTrue(leaves.size() == 1);
Query q = parse("DuplicateFilterQuery.xml");
int h = searcher.search(q, null, 1000).totalHits;
int h = searcher.search(q, 1000).totalHits;
assertEquals("DuplicateFilterQuery should produce 1 result ", 1, h);
}
@ -217,7 +217,7 @@ public class TestParser extends LuceneTestCase {
if (VERBOSE) {
System.out.println("TEST: query=" + q);
}
TopDocs hits = searcher.search(q, null, numDocs);
TopDocs hits = searcher.search(q, numDocs);
assertTrue(qType + " should produce results ", hits.totalHits > 0);
if (VERBOSE) {
System.out.println("=========" + qType + "============");

View File

@ -98,7 +98,7 @@ public class TestQueryTemplateManager extends LuceneTestCase {
Query q = builder.getQuery(doc.getDocumentElement());
//Run the query
int h = searcher.search(q, null, 1000).totalHits;
int h = searcher.search(q, 1000).totalHits;
//Check we have the expected number of results
int expectedHits = Integer.parseInt(queryFormProperties.getProperty("expectedMatches"));

Some files were not shown because too many files have changed in this diff.