From 2cd3fb807daf7a82d71c2ffeb44c75e2bd0814fc Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Tue, 24 Feb 2015 17:43:10 +0000
Subject: [PATCH] LUCENE-6286: Removed IndexSearcher methods that take a Filter object.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1662059 13f79535-47bb-0310-9956-ffa450edef68
---
 lucene/CHANGES.txt | 3 +
 .../shingle/ShingleAnalyzerWrapperTest.java | 4 +-
 .../index/TestBackwardsCompatibility.java | 20 +--
 .../benchmark/byTask/tasks/ReadTask.java | 4 +-
 .../benchmark/quality/QualityBenchmark.java | 2 +-
 .../org/apache/lucene/search/FieldDoc.java | 2 +-
 .../lucene/search/FieldValueHitQueue.java | 4 +-
 .../apache/lucene/search/IndexSearcher.java | 108 ++--------------
 .../org/apache/lucene/search/TopDocs.java | 1 -
 .../apache/lucene/search/TopFieldDocs.java | 2 +-
 .../apache/lucene/search/package-info.java | 3 +-
 .../src/test/org/apache/lucene/TestDemo.java | 4 +-
 .../test/org/apache/lucene/TestSearch.java | 4 +-
 .../lucene/TestSearchForDuplicates.java | 4 +-
 .../perfield/TestPerFieldDocValuesFormat.java | 2 +-
 .../apache/lucene/document/TestDocument.java | 6 +-
 .../org/apache/lucene/index/TestCodecs.java | 11 --
 .../lucene/index/TestDeletionPolicy.java | 6 +-
 .../index/TestDirectoryReaderReopen.java | 2 +-
 .../apache/lucene/index/TestIndexWriter.java | 4 +-
 .../lucene/index/TestIndexWriterCommit.java | 16 +--
 .../lucene/index/TestIndexWriterDelete.java | 4 +-
 .../lucene/index/TestIndexWriterMaxDocs.java | 2 +-
 .../index/TestIndexWriterOnDiskFull.java | 4 +-
 .../lucene/index/TestLazyProxSkipping.java | 2 +-
 .../apache/lucene/index/TestManyFields.java | 2 +-
 .../index/TestParallelCompositeReader.java | 4 +-
 .../lucene/index/TestParallelLeafReader.java | 4 +-
 .../apache/lucene/search/TestBoolean2.java | 8 +-
 .../search/TestBooleanMinShouldMatch.java | 14 +-
 .../apache/lucene/search/TestBooleanOr.java | 2 +-
 .../lucene/search/TestBooleanScorer.java | 2 +-
 .../search/TestCachingWrapperFilter.java | 20 +--
 .../lucene/search/TestConstantScoreQuery.java | 4 +-
 .../lucene/search/TestCustomSearcherSort.java | 12 +-
 .../apache/lucene/search/TestDateFilter.java | 24 ++--
 .../apache/lucene/search/TestDateSort.java | 2 +-
 .../search/TestDisjunctionMaxQuery.java | 16 +--
 .../apache/lucene/search/TestDocIdSet.java | 4 +-
 .../search/TestElevationComparator.java | 2 +-
 .../search/TestFieldCacheTermsFilter.java | 7 +-
 .../lucene/search/TestFilteredQuery.java | 22 ++--
 .../lucene/search/TestFilteredSearch.java | 2 +-
 .../apache/lucene/search/TestFuzzyQuery.java | 56 ++++----
 .../lucene/search/TestIndexSearcher.java | 35 ++---
 .../lucene/search/TestMatchAllDocsQuery.java | 8 +-
 .../lucene/search/TestMultiPhraseQuery.java | 14 +-
 .../search/TestMultiTermConstantScore.java | 120 +++++++++---------
 .../apache/lucene/search/TestNeedsScores.java | 2 +-
 .../org/apache/lucene/search/TestNot.java | 2 +-
 .../search/TestNumericRangeQuery32.java | 16 +--
 .../search/TestNumericRangeQuery64.java | 16 +--
 .../lucene/search/TestPhrasePrefixQuery.java | 4 +-
 .../apache/lucene/search/TestPhraseQuery.java | 66 +++++-----
 .../lucene/search/TestPositionIncrement.java | 24 ++--
 .../lucene/search/TestPrefixFilter.java | 18 +--
 .../search/TestPrefixInBooleanQuery.java | 8 +-
 .../apache/lucene/search/TestPrefixQuery.java | 6 +-
 .../lucene/search/TestQueryWrapperFilter.java | 22 ++--
 .../apache/lucene/search/TestSearchAfter.java | 31 ++---
 .../apache/lucene/search/TestSortRandom.java | 11 +-
 .../search/TestSortedNumericSortField.java | 4 +-
 .../lucene/search/TestSortedSetSortField.java | 4 +-
 .../lucene/search/TestSubScorerFreqs.java | 6 +-
 .../lucene/search/TestTermRangeFilter.java | 68 +++++-----
 .../lucene/search/TestTermRangeQuery.java | 32 ++---
 .../search/TestTimeLimitingCollector.java | 2 +-
 .../search/TestTotalHitCountCollector.java | 2 +-
 .../apache/lucene/search/TestWildcard.java | 10 +-
 .../search/payloads/TestPayloadNearQuery.java | 16 +--
 .../search/payloads/TestPayloadTermQuery.java | 10 +-
 .../search/spans/TestSpansAdvanced.java | 2 +-
 .../lucene/store/TestBufferedIndexInput.java | 8 +-
 .../apache/lucene/store/TestLockFactory.java | 2 +-
 .../org/apache/lucene/demo/SearchFiles.java | 2 +-
 .../demo/facet/SimpleFacetsExample.java | 2 +-
 .../expressions/TestDemoExpressions.java | 18 +--
 .../expressions/TestExpressionSorts.java | 25 ++--
 .../lucene/facet/TestDrillSideways.java | 7 +-
 .../search/grouping/GroupingSearch.java | 33 ++---
 .../search/grouping/GroupingSearchTest.java | 6 +-
 .../search/highlight/HighlighterTest.java | 14 +-
 .../TestMultiTermHighlighting.java | 56 ++++----
 .../TestPostingsHighlighter.java | 32 ++---
 .../TestPostingsHighlighterRanking.java | 4 +-
 .../lucene/search/join/TestBlockJoin.java | 57 ++++-----
 .../search/join/TestBlockJoinValidation.java | 3 +-
 .../uninverting/TestFieldCacheSort.java | 26 ++--
 .../uninverting/TestFieldCacheSortRandom.java | 9 +-
 .../uninverting/TestNumericTerms32.java | 2 +-
 .../uninverting/TestNumericTerms64.java | 2 +-
 .../lucene/queries/TestCustomScoreQuery.java | 10 +-
 .../queries/function/TestFieldScoreQuery.java | 4 +-
 .../queries/function/TestValueSources.java | 2 +-
 .../classic/TestMultiFieldQueryParser.java | 2 +-
 .../standard/TestMultiFieldQPHelper.java | 2 +-
 .../flexible/standard/TestQPHelper.java | 2 +-
 .../queryparser/util/QueryParserTestBase.java | 2 +-
 .../lucene/queryparser/xml/TestParser.java | 6 +-
 .../xml/TestQueryTemplateManager.java | 2 +-
 .../sandbox/queries/DuplicateFilterTest.java | 11 +-
 .../sandbox/queries/TestSlowFuzzyQuery.java | 92 +++++++-------
 .../sandbox/queries/regex/TestRegexQuery.java | 4 +-
 .../queries/regex/TestSpanRegexQuery.java | 2 +-
 .../lucene/search/TestTermAutomatonQuery.java | 13 +-
 .../apache/lucene/spatial/SpatialExample.java | 4 +-
 .../prefix/HeatmapFacetCounterTest.java | 4 +-
 .../lucene/search/spell/SpellChecker.java | 2 +-
 .../lucene/analysis/CollationTestBase.java | 15 ++-
 .../index/BaseDocValuesFormatTestCase.java | 18 +--
 .../ThreadedIndexingAndSearchingTestCase.java | 4 +-
 .../lucene/search/AssertingIndexSearcher.java | 8 --
 .../org/apache/lucene/search/CheckHits.java | 15 +--
 .../search/SearchEquivalenceTestBase.java | 16 ++-
 .../accumulator/FacetingAccumulator.java | 5 +-
 .../org/apache/solr/handler/BlobHandler.java | 2 +-
 .../handler/component/ExpandComponent.java | 7 +-
 .../org/apache/solr/request/SimpleFacets.java | 9 +-
 .../java/org/apache/solr/search/Grouping.java | 7 +-
 .../solr/search/LuceneQueryOptimizer.java | 2 +-
 .../apache/solr/search/SolrIndexSearcher.java | 33 +++--
 .../solr/search/grouping/CommandHandler.java | 11 +-
 .../test/org/apache/solr/search/TestSort.java | 5 +-
 .../solr/search/function/TestOrdValues.java | 4 +-
 124 files changed, 759 insertions(+), 864 deletions(-)

diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index bc1c0a6fc68..cf7bdad151f 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -156,6 +156,9 @@ API Changes
   performance would be achieved through CollationKeyAnalyzer or
   ICUCollationKeyAnalyzer. (Adrien Grand)
 
+* LUCENE-6286: Removed IndexSearcher methods that take a Filter object.
+ A BooleanQuery with a filter clause must be used instead. (Adrien Grand) + Other * LUCENE-6248: Remove unused odd constants from StandardSyntaxParser.jj diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java index b905d0abd7d..15d498a0c9f 100644 --- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java +++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java @@ -114,7 +114,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase { ts.end(); } - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; int[] ranks = new int[] { 0 }; compareRanks(hits, ranks); } @@ -139,7 +139,7 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase { ts.end(); } - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; int[] ranks = new int[] { 1, 2, 0 }; compareRanks(hits, ranks); } diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java index 343edf6cc3e..1c4b4316d7b 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java @@ -729,7 +729,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { } } - ScoreDoc[] hits = searcher.search(new TermQuery(new Term(new String("content"), "aaa")), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(new Term(new String("content"), "aaa")), 1000).scoreDocs; // First document should be #0 StoredDocument d = searcher.getIndexReader().document(hits[0].doc); @@ -738,20 +738,20 @@ public class TestBackwardsCompatibility extends LuceneTestCase { doTestHits(hits, 34, searcher.getIndexReader()); if (is40Index) { - hits = searcher.search(new TermQuery(new Term(new String("content5"), "aaa")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term(new String("content5"), "aaa")), 1000).scoreDocs; doTestHits(hits, 34, searcher.getIndexReader()); - hits = searcher.search(new TermQuery(new Term(new String("content6"), "aaa")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term(new String("content6"), "aaa")), 1000).scoreDocs; doTestHits(hits, 34, searcher.getIndexReader()); } - hits = searcher.search(new TermQuery(new Term("utf8", "\u0000")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term("utf8", "\u0000")), 1000).scoreDocs; assertEquals(34, hits.length); - hits = searcher.search(new TermQuery(new Term(new String("utf8"), "lu\uD834\uDD1Ece\uD834\uDD60ne")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term(new String("utf8"), "lu\uD834\uDD1Ece\uD834\uDD60ne")), 1000).scoreDocs; assertEquals(34, hits.length); - hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term("utf8", "ab\ud917\udc17cd")), 1000).scoreDocs; assertEquals(34, hits.length); reader.close(); @@ -775,7 +775,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { // make sure searching sees right # hits IndexReader reader = 
DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; StoredDocument d = searcher.getIndexReader().document(hits[0].doc); assertEquals("wrong first document", "0", d.get("id")); doTestHits(hits, 44, searcher.getIndexReader()); @@ -790,7 +790,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 44, hits.length); d = searcher.doc(hits[0].doc); doTestHits(hits, 44, searcher.getIndexReader()); @@ -802,7 +802,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { // make sure searching sees right # hits DirectoryReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 34, hits.length); StoredDocument d = searcher.doc(hits[0].doc); assertEquals("wrong first document", "0", d.get("id")); @@ -816,7 +816,7 @@ public class TestBackwardsCompatibility extends LuceneTestCase { reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(new Term("content", "aaa")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs; assertEquals("wrong number of hits", 34, hits.length); doTestHits(hits, 34, searcher.getIndexReader()); reader.close(); diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java index 7d8dbb589fc..5c6639fae94 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/ReadTask.java @@ -124,14 +124,14 @@ public abstract class ReadTask extends PerfTask { TopFieldCollector collector = TopFieldCollector.create(sort, numHits, true, withScore(), withMaxScore()); - searcher.search(q, null, collector); + searcher.search(q, collector); hits = collector.topDocs(); } else { hits = searcher.search(q, numHits); } } else { Collector collector = createCollector(); - searcher.search(q, null, collector); + searcher.search(q, collector); //hits = collector.topDocs(); } diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/quality/QualityBenchmark.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/quality/QualityBenchmark.java index 37fec7460bf..f1a68fc60ec 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/quality/QualityBenchmark.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/quality/QualityBenchmark.java @@ -94,7 +94,7 @@ public class QualityBenchmark { Query q = qqParser.parse(qq); // search with this query long t1 = System.currentTimeMillis(); - TopDocs td = searcher.search(q,null,maxResults); + TopDocs td = searcher.search(q,maxResults); long searchTime = System.currentTimeMillis()-t1; //most likely we either submit or judge, but check both if 
(judge!=null) { diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldDoc.java b/lucene/core/src/java/org/apache/lucene/search/FieldDoc.java index 4748841a55e..64c8af9dab5 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FieldDoc.java +++ b/lucene/core/src/java/org/apache/lucene/search/FieldDoc.java @@ -46,7 +46,7 @@ public class FieldDoc extends ScoreDoc { * the value method corresponding * FieldComparator used to sort this field. * @see Sort - * @see IndexSearcher#search(Query,Filter,int,Sort) + * @see IndexSearcher#search(Query,int,Sort) */ public Object[] fields; diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java b/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java index 3a6f664225e..c464f23509b 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java +++ b/lucene/core/src/java/org/apache/lucene/search/FieldValueHitQueue.java @@ -27,7 +27,7 @@ import org.apache.lucene.util.PriorityQueue; * * @lucene.experimental * @since 2.9 - * @see IndexSearcher#search(Query,Filter,int,Sort) + * @see IndexSearcher#search(Query,int,Sort) */ public abstract class FieldValueHitQueue extends PriorityQueue { @@ -202,7 +202,7 @@ public abstract class FieldValueHitQueue ext * * @param entry The Entry used to create a FieldDoc * @return The newly created FieldDoc - * @see IndexSearcher#search(Query,Filter,int,Sort) + * @see IndexSearcher#search(Query,int,Sort) */ FieldDoc fillFields(final Entry entry) { final int n = comparators.length; diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java index 596ede4784f..e90df9bdb41 100644 --- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java +++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java @@ -47,8 +47,7 @@ import org.apache.lucene.util.ThreadInterruptedException; /** Implements search over a single IndexReader. * *

Applications usually need only call the inherited - * {@link #search(Query,int)} - * or {@link #search(Query,Filter,int)} methods. For + * {@link #search(Query,int)} method. For * performance reasons, if your index is unchanging, you * should share a single IndexSearcher instance across * multiple searches instead of creating a new one @@ -209,11 +208,6 @@ public class IndexSearcher { public Similarity getSimilarity() { return similarity; } - - /** @lucene.internal */ - protected Query wrapFilter(Query query, Filter filter) { - return (filter == null) ? query : new FilteredQuery(query, filter); - } /** Finds the top n * hits for query where all results are after a previous @@ -275,21 +269,6 @@ public class IndexSearcher { } } - /** Finds the top n - * hits for query, applying filter if non-null, - * where all results are after a previous result (after). - *

- * By passing the bottom result from a previous page as after, - * this method can be used for efficient 'deep-paging' across potentially - * large result sets. - * - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException { - return searchAfter(after, wrapFilter(query, filter), n); - } - /** Finds the top n * hits for query. * @@ -301,34 +280,6 @@ public class IndexSearcher { return searchAfter(null, query, n); } - - /** Finds the top n - * hits for query, applying filter if non-null. - * - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - public TopDocs search(Query query, Filter filter, int n) - throws IOException { - return search(wrapFilter(query, filter), n); - } - - /** Lower-level search API. - * - *

{@link LeafCollector#collect(int)} is called for every matching - * document. - * - * @param query to match documents - * @param filter if non-null, used to permit documents to be collected. - * @param results to receive hits - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - public void search(Query query, Filter filter, Collector results) - throws IOException { - search(wrapFilter(query, filter), results); - } - /** Lower-level search API. * *

{@link LeafCollector#collect(int)} is called for every matching document. @@ -340,30 +291,13 @@ public class IndexSearcher { throws IOException { search(leafContexts, createNormalizedWeight(query, results.needsScores()), results); } - - /** Search implementation with arbitrary sorting. Finds - * the top n hits for query, applying - * filter if non-null, and sorting the hits by the criteria in - * sort. - * - *

NOTE: this does not compute scores by default; use - * {@link IndexSearcher#search(Query,Filter,int,Sort,boolean,boolean)} to - * control scoring. - * - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - public TopFieldDocs search(Query query, Filter filter, int n, - Sort sort) throws IOException { - return search(query, filter, n, sort, false, false); - } /** Search implementation with arbitrary sorting, plus * control over whether hit scores and max score * should be computed. Finds - * the top n hits for query, applying - * filter if non-null, and sorting the hits by the criteria in - * sort. If doDocScores is true + * the top n hits for query, and sorting + * the hits by the criteria in sort. + * If doDocScores is true * then the score of each hit will be computed and * returned. If doMaxScore is * true then the maximum score over all @@ -372,37 +306,21 @@ public class IndexSearcher { * @throws BooleanQuery.TooManyClauses If a query would exceed * {@link BooleanQuery#getMaxClauseCount()} clauses. */ - public TopFieldDocs search(Query query, Filter filter, int n, - Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException { - return searchAfter(null, query, filter, n, sort, doDocScores, doMaxScore); - } - - /** Finds the top n - * hits for query, applying filter if non-null, - * where all results are after a previous result (after). - *

- * By passing the bottom result from a previous page as after, - * this method can be used for efficient 'deep-paging' across potentially - * large result sets. - * - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException { - return searchAfter(after, query, filter, n, sort, false, false); + public TopFieldDocs search(Query query, int n, + Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException { + return searchAfter(null, query, n, sort, doDocScores, doMaxScore); } /** - * Search implementation with arbitrary sorting and no filter. + * Search implementation with arbitrary sorting. * @param query The query to search for * @param n Return only the top n results * @param sort The {@link org.apache.lucene.search.Sort} object * @return The top docs, sorted according to the supplied {@link org.apache.lucene.search.Sort} instance * @throws IOException if there is a low-level I/O error */ - public TopFieldDocs search(Query query, int n, - Sort sort) throws IOException { - return search(query, null, n, sort, false, false); + public TopFieldDocs search(Query query, int n, Sort sort) throws IOException { + return searchAfter(null, query, n, sort, false, false); } /** Finds the top n @@ -417,7 +335,7 @@ public class IndexSearcher { * {@link BooleanQuery#getMaxClauseCount()} clauses. */ public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException { - return searchAfter(after, query, null, n, sort, false, false); + return searchAfter(after, query, n, sort, false, false); } /** Finds the top n @@ -436,14 +354,14 @@ public class IndexSearcher { * @throws BooleanQuery.TooManyClauses If a query would exceed * {@link BooleanQuery#getMaxClauseCount()} clauses. */ - public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int numHits, Sort sort, + public TopFieldDocs searchAfter(ScoreDoc after, Query query, int numHits, Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException { if (after != null && !(after instanceof FieldDoc)) { // TODO: if we fix type safety of TopFieldDocs we can // remove this throw new IllegalArgumentException("after must be a FieldDoc; got " + after); } - return searchAfter((FieldDoc) after, wrapFilter(query, filter), numHits, sort, doDocScores, doMaxScore); + return searchAfter((FieldDoc) after, query, numHits, sort, doDocScores, doMaxScore); } private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort sort, diff --git a/lucene/core/src/java/org/apache/lucene/search/TopDocs.java b/lucene/core/src/java/org/apache/lucene/search/TopDocs.java index e54c1002642..99a90699bc0 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopDocs.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopDocs.java @@ -22,7 +22,6 @@ import org.apache.lucene.util.PriorityQueue; import java.io.IOException; /** Represents hits returned by {@link - * IndexSearcher#search(Query,Filter,int)} and {@link * IndexSearcher#search(Query,int)}. 
*/ public class TopDocs { diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldDocs.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldDocs.java index 42b3f84b88c..59cfc71b1b1 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopFieldDocs.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldDocs.java @@ -19,7 +19,7 @@ package org.apache.lucene.search; /** Represents hits returned by {@link - * IndexSearcher#search(Query,Filter,int,Sort)}. + * IndexSearcher#search(Query,int,Sort)}. */ public class TopFieldDocs extends TopDocs { diff --git a/lucene/core/src/java/org/apache/lucene/search/package-info.java b/lucene/core/src/java/org/apache/lucene/search/package-info.java index 34a180f93ab..3c36c4c3219 100644 --- a/lucene/core/src/java/org/apache/lucene/search/package-info.java +++ b/lucene/core/src/java/org/apache/lucene/search/package-info.java @@ -40,8 +40,7 @@ * on implementing your own Query class, see Custom Queries -- Expert Level below. *

* To perform a search, applications usually call {@link - * org.apache.lucene.search.IndexSearcher#search(Query,int)} or {@link - * org.apache.lucene.search.IndexSearcher#search(Query,Filter,int)}. + * org.apache.lucene.search.IndexSearcher#search(Query,int)}. *

* Once a Query has been created and submitted to the {@link org.apache.lucene.search.IndexSearcher IndexSearcher}, the scoring * process begins. After some infrastructure setup, control finally passes to the {@link org.apache.lucene.search.Weight Weight} diff --git a/lucene/core/src/test/org/apache/lucene/TestDemo.java b/lucene/core/src/test/org/apache/lucene/TestDemo.java index b512b5f995f..1812ed2106e 100644 --- a/lucene/core/src/test/org/apache/lucene/TestDemo.java +++ b/lucene/core/src/test/org/apache/lucene/TestDemo.java @@ -61,7 +61,7 @@ public class TestDemo extends LuceneTestCase { assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -73,7 +73,7 @@ public class TestDemo extends LuceneTestCase { PhraseQuery phraseQuery = new PhraseQuery(); phraseQuery.add(new Term("fieldname", "to")); phraseQuery.add(new Term("fieldname", "be")); - assertEquals(1, isearcher.search(phraseQuery, null, 1).totalHits); + assertEquals(1, isearcher.search(phraseQuery, 1).totalHits); ireader.close(); directory.close(); diff --git a/lucene/core/src/test/org/apache/lucene/TestSearch.java b/lucene/core/src/test/org/apache/lucene/TestSearch.java index d5002cabe69..470a95dff6b 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearch.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearch.java @@ -57,7 +57,7 @@ public class TestSearch extends LuceneTestCase { try { IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); assertTrue("score is not negative: " + hits[0].score, hits[0].score < 0); @@ -147,7 +147,7 @@ public class TestSearch extends LuceneTestCase { System.out.println("TEST: query=" + query); } - hits = searcher.search(query, null, 1000, sort).scoreDocs; + hits = searcher.search(query, 1000, sort).scoreDocs; out.println(hits.length + " total results"); for (int i = 0 ; i < hits.length && i < 10; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java index c276f9ffa50..3827d6093dd 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java @@ -117,7 +117,7 @@ public class TestSearchForDuplicates extends LuceneTestCase { final Sort sort = new Sort(SortField.FIELD_SCORE, new SortField(ID_FIELD, SortField.Type.INT)); - ScoreDoc[] hits = searcher.search(query, null, MAX_DOCS, sort).scoreDocs; + ScoreDoc[] hits = searcher.search(query, MAX_DOCS, sort).scoreDocs; printHits(out, hits, searcher); checkHits(hits, MAX_DOCS, searcher); @@ -130,7 +130,7 @@ public class TestSearchForDuplicates extends LuceneTestCase { booleanQuery.add(new TermQuery(new Term(PRIORITY_FIELD, MED_PRIORITY)), BooleanClause.Occur.SHOULD); out.println("Query: " + booleanQuery.toString(PRIORITY_FIELD)); - hits = searcher.search(booleanQuery, null, MAX_DOCS, sort).scoreDocs; + hits = searcher.search(booleanQuery, MAX_DOCS, sort).scoreDocs; printHits(out, hits, searcher); checkHits(hits, MAX_DOCS, searcher); diff --git 
a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java index 43ab7e2b84e..060b20ecb9a 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldDocValuesFormat.java @@ -107,7 +107,7 @@ public class TestPerFieldDocValuesFormat extends BaseDocValuesFormatTestCase { assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java index e28170ca891..7ca832a2126 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestDocument.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestDocument.java @@ -218,7 +218,7 @@ public class TestDocument extends LuceneTestCase { Query query = new TermQuery(new Term("keyword", "test1")); // ensure that queries return expected results without DateFilter first - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); doAssert(searcher.doc(hits[0].doc)); @@ -250,7 +250,7 @@ public class TestDocument extends LuceneTestCase { query.add(new Term("indexed_not_tokenized", "test1")); query.add(new Term("indexed_not_tokenized", "test2")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); doAssert(searcher.doc(hits[0].doc)); @@ -332,7 +332,7 @@ public class TestDocument extends LuceneTestCase { Query query = new TermQuery(new Term("keyword", "test")); // ensure that queries return expected results without DateFilter first - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); int result = 0; for (int i = 0; i < 3; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java index 4100dda9a66..73e3555dc59 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestCodecs.java @@ -304,17 +304,6 @@ public class TestCodecs extends LuceneTestCase { dir.close(); } - private ScoreDoc[] search(final IndexWriter writer, final Query q, final int n) throws IOException { - final IndexReader reader = writer.getReader(); - final IndexSearcher searcher = newSearcher(reader); - try { - return searcher.search(q, null, n).scoreDocs; - } - finally { - reader.close(); - } - } - private class Verify extends Thread { final Fields termsDict; final FieldData[] fields; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java index 0a20e5f32a1..1c4a0ad4e5d 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java @@ -672,7 +672,7 @@ public class TestDeletionPolicy 
extends LuceneTestCase { writer.close(); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(16, hits.length); reader.close(); @@ -690,7 +690,7 @@ public class TestDeletionPolicy extends LuceneTestCase { IndexReader rwReader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(rwReader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // Simplistic check: just verify only the past N segments_N's still @@ -708,7 +708,7 @@ public class TestDeletionPolicy extends LuceneTestCase { // Work backwards in commits on what the expected // count should be. searcher = newSearcher(reader); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(expectedCount, hits.length); if (expectedCount == 0) { expectedCount = 16; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java index 6b9347943fa..3ee7d2ad4c7 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDirectoryReaderReopen.java @@ -272,7 +272,7 @@ public class TestDirectoryReaderReopen extends LuceneTestCase { IndexSearcher searcher = newSearcher(refreshed); ScoreDoc[] hits = searcher.search( new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))), - null, 1000).scoreDocs; + 1000).scoreDocs; if (hits.length > 0) { searcher.doc(hits[0].doc); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java index e568e4fc0fa..4f472aa4c77 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -444,7 +444,7 @@ public class TestIndexWriter extends LuceneTestCase { IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals(10, hits.length); reader.close(); @@ -466,7 +466,7 @@ public class TestIndexWriter extends LuceneTestCase { writer.close(); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals(27, hits.length); reader.close(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java index 735e2e051da..380471788d4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterCommit.java @@ -52,7 +52,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { Term searchTerm = new Term("content", "aaa"); DirectoryReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new 
TermQuery(searchTerm), 1000).scoreDocs; assertEquals("first number of hits", 14, hits.length); reader.close(); @@ -65,7 +65,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { } IndexReader r = DirectoryReader.open(dir); searcher = newSearcher(r); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("reader incorrectly sees changes from writer", 14, hits.length); r.close(); assertTrue("reader should have still been current", reader.isCurrent()); @@ -77,7 +77,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { IndexReader r = DirectoryReader.open(dir); searcher = newSearcher(r); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("reader did not see changes after writer was closed", 47, hits.length); r.close(); reader.close(); @@ -108,7 +108,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { Term searchTerm = new Term("content", "aaa"); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("first number of hits", 14, hits.length); reader.close(); @@ -123,7 +123,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("reader incorrectly sees changes from writer", 14, hits.length); reader.close(); @@ -134,7 +134,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("saw changes after writer.abort", 14, hits.length); reader.close(); @@ -156,7 +156,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { } IndexReader r = DirectoryReader.open(dir); searcher = newSearcher(r); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("reader incorrectly sees changes from writer", 14, hits.length); r.close(); } @@ -164,7 +164,7 @@ public class TestIndexWriterCommit extends LuceneTestCase { writer.close(); IndexReader r = DirectoryReader.open(dir); searcher = newSearcher(r); - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("didn't see changes after close", 218, hits.length); r.close(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java index dd3b21e002a..771f5d4d026 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java @@ -473,7 +473,7 @@ public class TestIndexWriterDelete extends LuceneTestCase { private int getHitCount(Directory dir, Term term) throws IOException { IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = 
newSearcher(reader); - int hitCount = searcher.search(new TermQuery(term), null, 1000).totalHits; + int hitCount = searcher.search(new TermQuery(term), 1000).totalHits; reader.close(); return hitCount; } @@ -656,7 +656,7 @@ public class TestIndexWriterDelete extends LuceneTestCase { IndexSearcher searcher = newSearcher(newReader); ScoreDoc[] hits = null; try { - hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; } catch (IOException e) { e.printStackTrace(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java index 2fdbfea3276..cb2deb74089 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMaxDocs.java @@ -67,7 +67,7 @@ public class TestIndexWriterMaxDocs extends LuceneTestCase { assertEquals(IndexWriter.MAX_DOCS, hits.totalHits); // Sort by docID reversed: - hits = searcher.search(new TermQuery(new Term("field", "text")), null, 10, new Sort(new SortField(null, SortField.Type.DOC, true))); + hits = searcher.search(new TermQuery(new Term("field", "text")), 10, new Sort(new SortField(null, SortField.Type.DOC, true))); assertEquals(IndexWriter.MAX_DOCS, hits.totalHits); assertEquals(10, hits.scoreDocs.length); assertEquals(IndexWriter.MAX_DOCS-1, hits.scoreDocs[0].doc); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java index 5c1ebd3a5bf..5eff328490d 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java @@ -202,7 +202,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase { assertEquals("first docFreq", 57, reader.docFreq(searchTerm)); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(searchTerm), 1000).scoreDocs; assertEquals("first number of hits", 57, hits.length); reader.close(); @@ -396,7 +396,7 @@ public class TestIndexWriterOnDiskFull extends LuceneTestCase { searcher = newSearcher(reader); try { - hits = searcher.search(new TermQuery(searchTerm), null, END_COUNT).scoreDocs; + hits = searcher.search(new TermQuery(searchTerm), END_COUNT).scoreDocs; } catch (IOException e) { e.printStackTrace(System.out); fail(testName + ": exception when searching: " + e); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java index 43417764f7d..936e518c302 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestLazyProxSkipping.java @@ -114,7 +114,7 @@ public class TestLazyProxSkipping extends LuceneTestCase { PhraseQuery pq = new PhraseQuery(); pq.add(new Term(this.field, this.term1)); pq.add(new Term(this.field, this.term2)); - return this.searcher.search(pq, null, 1000).scoreDocs; + return this.searcher.search(pq, 1000).scoreDocs; } private void performTest(int numHits) throws IOException { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java b/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java index 
1a57b0e2e09..07b731b8d5f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestManyFields.java @@ -107,7 +107,7 @@ public class TestManyFields extends LuceneTestCase { IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - int totalHits = searcher.search(new TermQuery(new Term("field", "aaa")), null, 1).totalHits; + int totalHits = searcher.search(new TermQuery(new Term("field", "aaa")), 1).totalHits; assertEquals(n*100, totalHits); reader.close(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java index ae6c37212b3..9813038375f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelCompositeReader.java @@ -402,8 +402,8 @@ public class TestParallelCompositeReader extends LuceneTestCase { } private void queryTest(Query query) throws IOException { - ScoreDoc[] parallelHits = parallel.search(query, null, 1000).scoreDocs; - ScoreDoc[] singleHits = single.search(query, null, 1000).scoreDocs; + ScoreDoc[] parallelHits = parallel.search(query, 1000).scoreDocs; + ScoreDoc[] singleHits = single.search(query, 1000).scoreDocs; assertEquals(parallelHits.length, singleHits.length); for(int i = 0; i < parallelHits.length; i++) { assertEquals(parallelHits[i].score, singleHits[i].score, 0.001f); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java index 6aedca76d35..dd404e8c302 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestParallelLeafReader.java @@ -244,8 +244,8 @@ public class TestParallelLeafReader extends LuceneTestCase { } private void queryTest(Query query) throws IOException { - ScoreDoc[] parallelHits = parallel.search(query, null, 1000).scoreDocs; - ScoreDoc[] singleHits = single.search(query, null, 1000).scoreDocs; + ScoreDoc[] parallelHits = parallel.search(query, 1000).scoreDocs; + ScoreDoc[] singleHits = single.search(query, 1000).scoreDocs; assertEquals(parallelHits.length, singleHits.length); for(int i = 0; i < parallelHits.length; i++) { assertEquals(parallelHits[i].score, singleHits[i].score, 0.001f); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java index 63d999b09f0..396198cb996 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java @@ -132,11 +132,11 @@ public class TestBoolean2 extends LuceneTestCase { // sometimes return a default impl around the scorer so that we can // compare BS1 and BS2 TopScoreDocCollector collector = TopScoreDocCollector.create(1000); - searcher.search(query, null, collector); + searcher.search(query, collector); ScoreDoc[] hits1 = collector.topDocs().scoreDocs; collector = TopScoreDocCollector.create(1000); - searcher.search(query, null, collector); + searcher.search(query, collector); ScoreDoc[] hits2 = collector.topDocs().scoreDocs; assertEquals(mulFactor * collector.totalHits, @@ -285,13 +285,13 @@ public class TestBoolean2 extends LuceneTestCase { TopFieldCollector collector = TopFieldCollector.create(sort, 1000, false, true, true); - 
searcher.search(q1, null, collector); + searcher.search(q1, collector); ScoreDoc[] hits1 = collector.topDocs().scoreDocs; collector = TopFieldCollector.create(sort, 1000, false, true, true); - searcher.search(q1, null, collector); + searcher.search(q1, collector); ScoreDoc[] hits2 = collector.topDocs().scoreDocs; tot+=hits2.length; CheckHits.checkEqual(q1, hits1, hits2); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java index 083075e2f17..d70189576e3 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanMinShouldMatch.java @@ -87,7 +87,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase { public void verifyNrHits(Query q, int expected) throws Exception { // bs1 - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; if (expected != h.length) { printHits(getTestName(), h, s); } @@ -349,8 +349,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase { // Can't use Hits because normalized scores will mess things // up. The non-sorting version of search() that returns TopDocs // will not normalize scores. - TopDocs top1 = s.search(q1,null,100); - TopDocs top2 = s.search(q2,null,100); + TopDocs top1 = s.search(q1,100); + TopDocs top2 = s.search(q2,100); if (i < 100) { QueryUtils.check(random(), q1,s); QueryUtils.check(random(), q2,s); @@ -410,8 +410,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase { BooleanQuery q2 = new BooleanQuery(); q2.add(new TermQuery(new Term("data", "1")), BooleanClause.Occur.SHOULD); q2.setMinimumNumberShouldMatch(1); - TopDocs top1 = s.search(q1,null,100); - TopDocs top2 = s.search(q2,null,100); + TopDocs top1 = s.search(q1,100); + TopDocs top2 = s.search(q2,100); assertSubsetOfSameScores(q2, top1, top2); } finally { s.setSimilarity(oldSimilarity); @@ -432,8 +432,8 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase { BooleanQuery q2 = new BooleanQuery(); q2.add(new TermQuery(new Term("data", "1")), BooleanClause.Occur.SHOULD); q2.add(new TermQuery(new Term("data", "Z")), BooleanClause.Occur.MUST_NOT); - TopDocs top1 = s.search(q1,null,100); - TopDocs top2 = s.search(q2,null,100); + TopDocs top1 = s.search(q1,100); + TopDocs top2 = s.search(q2,100); assertSubsetOfSameScores(q2, top1, top2); } finally { s.setSimilarity(oldSimilarity); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java index 2f7de6fa0cd..d43c6f18358 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java @@ -52,7 +52,7 @@ public class TestBooleanOr extends LuceneTestCase { private int search(Query q) throws IOException { QueryUtils.check(random(), q,searcher); - return searcher.search(q, null, 1000).totalHits; + return searcher.search(q, 1000).totalHits; } public void testElements() throws IOException { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java index 0aa716af7a8..664cd3b4ed1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java @@ -55,7 +55,7 @@ public class TestBooleanScorer extends LuceneTestCase { 
query.add(new TermQuery(new Term(FIELD, "9")), BooleanClause.Occur.MUST_NOT); IndexSearcher indexSearcher = newSearcher(ir); - ScoreDoc[] hits = indexSearcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = indexSearcher.search(query, 1000).scoreDocs; assertEquals("Number of matched documents", 2, hits.length); ir.close(); directory.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java index c1ed29f6757..818b94dd93a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java @@ -71,16 +71,16 @@ public class TestCachingWrapperFilter extends LuceneTestCase { IOUtils.close(ir, dir); super.tearDown(); } - + private void assertFilterEquals(Filter f1, Filter f2) throws Exception { Query query = new MatchAllDocsQuery(); - TopDocs hits1 = is.search(query, f1, ir.maxDoc()); - TopDocs hits2 = is.search(query, f2, ir.maxDoc()); + TopDocs hits1 = is.search(new FilteredQuery(query, f1), ir.maxDoc()); + TopDocs hits2 = is.search(new FilteredQuery(query, f2), ir.maxDoc()); assertEquals(hits1.totalHits, hits2.totalHits); CheckHits.checkEqual(query, hits1.scoreDocs, hits2.scoreDocs); // now do it again to confirm caching works - TopDocs hits3 = is.search(query, f1, ir.maxDoc()); - TopDocs hits4 = is.search(query, f2, ir.maxDoc()); + TopDocs hits3 = is.search(new FilteredQuery(query, f1), ir.maxDoc()); + TopDocs hits4 = is.search(new FilteredQuery(query, f2), ir.maxDoc()); assertEquals(hits3.totalHits, hits4.totalHits); CheckHits.checkEqual(query, hits3.scoreDocs, hits4.scoreDocs); } @@ -319,7 +319,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { CachingWrapperFilter filter = new CachingWrapperFilter(startFilter, FilterCachingPolicy.ALWAYS_CACHE); - docs = searcher.search(new MatchAllDocsQuery(), filter, 1); + docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1); assertTrue(filter.ramBytesUsed() > 0); assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits); @@ -356,7 +356,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { searcher = newSearcher(reader, false); missCount = filter.missCount; - docs = searcher.search(new MatchAllDocsQuery(), filter, 1); + docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1); assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits); // cache hit @@ -370,7 +370,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { reader = refreshReader(reader); searcher = newSearcher(reader, false); - docs = searcher.search(new MatchAllDocsQuery(), filter, 1); + docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1); assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits); missCount = filter.missCount; assertTrue(missCount > 0); @@ -389,7 +389,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { reader = refreshReader(reader); searcher = newSearcher(reader, false); - docs = searcher.search(new MatchAllDocsQuery(), filter, 1); + docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1); assertEquals("[query + filter] Should find 2 hits...", 2, docs.totalHits); assertTrue(filter.missCount > missCount); missCount = filter.missCount; @@ -405,7 +405,7 @@ public class TestCachingWrapperFilter extends LuceneTestCase { reader = refreshReader(reader); 
searcher = newSearcher(reader, false); - docs = searcher.search(new MatchAllDocsQuery(), filter, 1); + docs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 1); assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits); // CWF reused the same entry (it dynamically applied the deletes): assertEquals(missCount, filter.missCount); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java index ecf0c7797c8..6bea511a80c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreQuery.java @@ -144,12 +144,12 @@ public class TestConstantScoreQuery extends LuceneTestCase { Query query = new ConstantScoreQuery(filterB); IndexSearcher s = newSearcher(r); - assertEquals(1, s.search(query, filterB, 1).totalHits); // Query for field:b, Filter field:b + assertEquals(1, s.search(new FilteredQuery(query, filterB), 1).totalHits); // Query for field:b, Filter field:b Filter filterA = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "a")))); query = new ConstantScoreQuery(filterA); - assertEquals(0, s.search(query, filterB, 1).totalHits); // Query field:b, Filter field:a + assertEquals(0, s.search(new FilteredQuery(query, filterB), 1).totalHits); // Query field:b, Filter field:a r.close(); d.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestCustomSearcherSort.java b/lucene/core/src/test/org/apache/lucene/search/TestCustomSearcherSort.java index 7f638e1edb8..07f6fbd2e2d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestCustomSearcherSort.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestCustomSearcherSort.java @@ -112,7 +112,7 @@ public class TestCustomSearcherSort extends LuceneTestCase { // make sure the documents returned by the search match the expected list private void matchHits(IndexSearcher searcher, Sort sort) throws IOException { // make a query without sorting first - ScoreDoc[] hitsByRank = searcher.search(query, null, Integer.MAX_VALUE).scoreDocs; + ScoreDoc[] hitsByRank = searcher.search(query, Integer.MAX_VALUE).scoreDocs; checkHits(hitsByRank, "Sort by rank: "); // check for duplicates Map resultMap = new TreeMap<>(); // store hits in TreeMap - TreeMap does not allow duplicates; existing @@ -124,7 +124,7 @@ public class TestCustomSearcherSort extends LuceneTestCase { } // now make a query using the sort criteria - ScoreDoc[] resultSort = searcher.search(query, null, Integer.MAX_VALUE, + ScoreDoc[] resultSort = searcher.search(query, Integer.MAX_VALUE, sort).scoreDocs; checkHits(resultSort, "Sort by custom criteria: "); // check for duplicates @@ -192,23 +192,23 @@ public class TestCustomSearcherSort extends LuceneTestCase { } @Override - public TopFieldDocs search(Query query, Filter filter, int nDocs, Sort sort) + public TopFieldDocs search(Query query, int nDocs, Sort sort) throws IOException { BooleanQuery bq = new BooleanQuery(); bq.add(query, BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term("mandant", Integer.toString(switcher))), BooleanClause.Occur.MUST); - return super.search(bq, filter, nDocs, sort); + return super.search(bq, nDocs, sort); } @Override - public TopDocs search(Query query, Filter filter, int nDocs) + public TopDocs search(Query query, int nDocs) throws IOException { BooleanQuery bq = new BooleanQuery(); bq.add(query, BooleanClause.Occur.MUST); 
bq.add(new TermQuery(new Term("mandant", Integer.toString(switcher))), BooleanClause.Occur.MUST); - return super.search(bq, filter, nDocs); + return super.search(bq, nDocs); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDateFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestDateFilter.java index 2226d8ad429..0ae89b2d6e1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDateFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDateFilter.java @@ -76,23 +76,23 @@ public class TestDateFilter extends LuceneTestCase { ScoreDoc[] result; // ensure that queries return expected results without DateFilter first - result = searcher.search(query1, null, 1000).scoreDocs; + result = searcher.search(query1, 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query2, null, 1000).scoreDocs; + result = searcher.search(query2, 1000).scoreDocs; assertEquals(1, result.length); // run queries with DateFilter - result = searcher.search(query1, df1, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query1, df1), 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query1, df2, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query1, df2), 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query2, df1, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query2, df1), 1000).scoreDocs; assertEquals(1, result.length); - result = searcher.search(query2, df2, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query2, df2), 1000).scoreDocs; assertEquals(0, result.length); reader.close(); indexStore.close(); @@ -140,23 +140,23 @@ public class TestDateFilter extends LuceneTestCase { ScoreDoc[] result; // ensure that queries return expected results without DateFilter first - result = searcher.search(query1, null, 1000).scoreDocs; + result = searcher.search(query1, 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query2, null, 1000).scoreDocs; + result = searcher.search(query2, 1000).scoreDocs; assertEquals(1, result.length); // run queries with DateFilter - result = searcher.search(query1, df1, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query1, df1), 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query1, df2, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query1, df2), 1000).scoreDocs; assertEquals(0, result.length); - result = searcher.search(query2, df1, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query2, df1), 1000).scoreDocs; assertEquals(1, result.length); - result = searcher.search(query2, df2, 1000).scoreDocs; + result = searcher.search(new FilteredQuery(query2, df2), 1000).scoreDocs; assertEquals(0, result.length); reader.close(); indexStore.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDateSort.java b/lucene/core/src/test/org/apache/lucene/search/TestDateSort.java index 6182d937c68..302b43e36ad 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDateSort.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDateSort.java @@ -83,7 +83,7 @@ public class TestDateSort extends LuceneTestCase { // Execute the search and process the search results. 
String[] actualOrder = new String[5]; - ScoreDoc[] hits = searcher.search(query, null, 1000, sort).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000, sort).scoreDocs; for (int i = 0; i < hits.length; i++) { StoredDocument document = searcher.doc(hits[i].doc); String text = document.get(TEXT_FIELD); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java index 384f99efcf0..072d14acb24 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java @@ -209,7 +209,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { q.add(tq("hed", "elephant")); QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("all docs should match " + q.toString(), 4, h.length); @@ -233,7 +233,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { q.add(tq("dek", "elephant")); QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("3 docs should match " + q.toString(), 3, h.length); @@ -258,7 +258,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { q.add(tq("dek", "elephant")); QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("all docs should match " + q.toString(), 4, h.length); @@ -281,7 +281,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { q.add(tq("dek", "elephant")); QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("3 docs should match " + q.toString(), 3, h.length); @@ -320,7 +320,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("3 docs should match " + q.toString(), 3, h.length); @@ -352,7 +352,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { } QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { assertEquals("4 docs should match " + q.toString(), 4, h.length); @@ -388,7 +388,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { } QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { @@ -442,7 +442,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { } QueryUtils.check(random(), q, s); - ScoreDoc[] h = s.search(q, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(q, 1000).scoreDocs; try { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDocIdSet.java b/lucene/core/src/test/org/apache/lucene/search/TestDocIdSet.java index 219f9cb0890..57835227886 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDocIdSet.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDocIdSet.java @@ -133,7 +133,7 @@ public class TestDocIdSet extends LuceneTestCase { } }; - Assert.assertEquals(0, searcher.search(new MatchAllDocsQuery(), f, 10).totalHits); + Assert.assertEquals(0, searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), 10).totalHits); 
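The hunks above and below all apply the same mechanical rewrite: the IndexSearcher overloads that accepted a Filter are gone, so call sites either drop a literal null filter argument or fold the filter into the query with FilteredQuery. The sketch below is illustrative rather than part of the patch; it assumes an already-open IndexReader and the trunk API at the time of this change (where FilteredQuery is still available), and the class name, field names and values are hypothetical.

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;

/** Illustrative only: how call sites change once the Filter-taking overloads are removed. */
class FilterMigrationSketch {

  static TopDocs searchWithAndWithoutFilter(IndexReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new TermQuery(new Term("field", "value"));          // hypothetical field/value
    Filter filter = new QueryWrapperFilter(
        new TermQuery(new Term("category", "books")));                // hypothetical filter

    // Old form: searcher.search(query, filter, 10)
    // New form: the filter is folded into the query itself.
    TopDocs filtered = searcher.search(new FilteredQuery(query, filter), 10);

    // Old form: searcher.search(query, null, 10)
    // New form: a null filter simply becomes the two-argument overload.
    TopDocs unfiltered = searcher.search(query, 10);

    return filtered.totalHits > 0 ? filtered : unfiltered;
  }
}

The hit counts asserted in these tests are unchanged by the rewrite, which is the point: for these cases, wrapping the query in FilteredQuery is behaviourally equivalent to the removed Filter-taking overloads.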
reader.close(); dir.close(); } @@ -179,7 +179,7 @@ public class TestDocIdSet extends LuceneTestCase { } }; - Assert.assertEquals(0, searcher.search(new MatchAllDocsQuery(), f, 10).totalHits); + Assert.assertEquals(0, searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), 10).totalHits); reader.close(); dir.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java index bb5887e47bf..72d7ccb3032 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestElevationComparator.java @@ -80,7 +80,7 @@ public class TestElevationComparator extends LuceneTestCase { ); TopDocsCollector topCollector = TopFieldCollector.create(sort, 50, false, true, true); - searcher.search(newq, null, topCollector); + searcher.search(newq, topCollector); TopDocs topDocs = topCollector.topDocs(0, 10); int nDocsReturned = topDocs.scoreDocs.length; diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheTermsFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheTermsFilter.java index 7dac5157e71..255e903f860 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheTermsFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFieldCacheTermsFilter.java @@ -21,7 +21,6 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LuceneTestCase; - import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; @@ -57,18 +56,18 @@ public class TestFieldCacheTermsFilter extends LuceneTestCase { List terms = new ArrayList<>(); terms.add("5"); - results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs; + results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs; assertEquals("Must match nothing", 0, results.length); terms = new ArrayList<>(); terms.add("10"); - results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs; + results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs; assertEquals("Must match 1", 1, results.length); terms = new ArrayList<>(); terms.add("10"); terms.add("20"); - results = searcher.search(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0])), numDocs).scoreDocs; + results = searcher.search(new FilteredQuery(q, new DocValuesTermsFilter(fieldName, terms.toArray(new String[0]))), numDocs).scoreDocs; assertEquals("Must match 2", 2, results.length); reader.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java index 5192d7028b2..2e4af444679 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFilteredQuery.java @@ -136,33 +136,33 @@ public class TestFilteredQuery extends LuceneTestCase { private void tFilteredQuery(final boolean useRandomAccess) throws Exception { Query filteredquery = new FilteredQuery(query, filter, randomFilterStrategy(random(), useRandomAccess)); - ScoreDoc[] hits = 
searcher.search (filteredquery, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search (filteredquery, 1000).scoreDocs; assertEquals (1, hits.length); assertEquals (1, hits[0].doc); QueryUtils.check(random(), filteredquery,searcher); - hits = searcher.search (filteredquery, null, 1000, new Sort(new SortField("sorter", SortField.Type.STRING))).scoreDocs; + hits = searcher.search (filteredquery, 1000, new Sort(new SortField("sorter", SortField.Type.STRING))).scoreDocs; assertEquals (1, hits.length); assertEquals (1, hits[0].doc); filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "one")), filter, randomFilterStrategy(random(), useRandomAccess)); - hits = searcher.search (filteredquery, null, 1000).scoreDocs; + hits = searcher.search (filteredquery, 1000).scoreDocs; assertEquals (2, hits.length); QueryUtils.check(random(), filteredquery,searcher); filteredquery = new FilteredQuery(new MatchAllDocsQuery(), filter, randomFilterStrategy(random(), useRandomAccess)); - hits = searcher.search (filteredquery, null, 1000).scoreDocs; + hits = searcher.search (filteredquery, 1000).scoreDocs; assertEquals (2, hits.length); QueryUtils.check(random(), filteredquery,searcher); filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "x")), filter, randomFilterStrategy(random(), useRandomAccess)); - hits = searcher.search (filteredquery, null, 1000).scoreDocs; + hits = searcher.search (filteredquery, 1000).scoreDocs; assertEquals (1, hits.length); assertEquals (3, hits[0].doc); QueryUtils.check(random(), filteredquery,searcher); filteredquery = new FilteredQuery(new TermQuery (new Term ("field", "y")), filter, randomFilterStrategy(random(), useRandomAccess)); - hits = searcher.search (filteredquery, null, 1000).scoreDocs; + hits = searcher.search (filteredquery, 1000).scoreDocs; assertEquals (0, hits.length); QueryUtils.check(random(), filteredquery,searcher); @@ -209,8 +209,8 @@ public class TestFilteredQuery extends LuceneTestCase { * Tests whether the scores of the two queries are the same. 
*/ public void assertScoreEquals(Query q1, Query q2) throws Exception { - ScoreDoc[] hits1 = searcher.search (q1, null, 1000).scoreDocs; - ScoreDoc[] hits2 = searcher.search (q2, null, 1000).scoreDocs; + ScoreDoc[] hits1 = searcher.search (q1, 1000).scoreDocs; + ScoreDoc[] hits2 = searcher.search (q2, 1000).scoreDocs; assertEquals(hits1.length, hits2.length); @@ -233,7 +233,7 @@ public class TestFilteredQuery extends LuceneTestCase { "sorter", "b", "d", true, true); Query filteredquery = new FilteredQuery(rq, filter, randomFilterStrategy(random(), useRandomAccess)); - ScoreDoc[] hits = searcher.search(filteredquery, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(filteredquery, 1000).scoreDocs; assertEquals(2, hits.length); QueryUtils.check(random(), filteredquery,searcher); } @@ -251,7 +251,7 @@ public class TestFilteredQuery extends LuceneTestCase { bq.add(query, BooleanClause.Occur.MUST); query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), randomFilterStrategy(random(), useRandomAccess)); bq.add(query, BooleanClause.Occur.MUST); - ScoreDoc[] hits = searcher.search(bq, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(bq, 1000).scoreDocs; assertEquals(0, hits.length); QueryUtils.check(random(), query,searcher); } @@ -269,7 +269,7 @@ public class TestFilteredQuery extends LuceneTestCase { bq.add(query, BooleanClause.Occur.SHOULD); query = new FilteredQuery(new TermQuery(new Term("field", "one")), new SingleDocTestFilter(1), randomFilterStrategy(random(), useRandomAccess)); bq.add(query, BooleanClause.Occur.SHOULD); - ScoreDoc[] hits = searcher.search(bq, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(bq, 1000).scoreDocs; assertEquals(2, hits.length); QueryUtils.check(random(), query,searcher); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java b/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java index d0c6a985d8d..f0444798270 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFilteredSearch.java @@ -75,7 +75,7 @@ public class TestFilteredSearch extends LuceneTestCase { IndexReader reader = DirectoryReader.open(directory); IndexSearcher indexSearcher = newSearcher(reader); - ScoreDoc[] hits = indexSearcher.search(booleanQuery, filter, 1000).scoreDocs; + ScoreDoc[] hits = indexSearcher.search(new FilteredQuery(booleanQuery, filter), 1000).scoreDocs; assertEquals("Number of matched documents", 1, hits.length); reader.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java index 82844106661..bb363efbb42 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestFuzzyQuery.java @@ -48,7 +48,7 @@ public class TestFuzzyQuery extends LuceneTestCase { writer.close(); FuzzyQuery query = new FuzzyQuery(new Term("field", "abc"), FuzzyQuery.defaultMaxEdits, 1); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); reader.close(); directory.close(); @@ -70,32 +70,32 @@ public class TestFuzzyQuery extends LuceneTestCase { writer.close(); FuzzyQuery query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 0); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = 
searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); // same with prefix query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 3); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(2, hits.length); query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 6); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // test scoring query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("3 documents should match", 3, hits.length); List order = Arrays.asList("bbbbb","abbbb","aabbb"); for (int i = 0; i < hits.length; i++) { @@ -107,7 +107,7 @@ public class TestFuzzyQuery extends LuceneTestCase { // test pq size by supplying maxExpansions=2 // This query would normally return 3 documents, because 3 terms match (see above): query = new FuzzyQuery(new Term("field", "bbbbb"), FuzzyQuery.defaultMaxEdits, 0, 2, false); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("only 2 documents should match", 2, hits.length); order = Arrays.asList("bbbbb","abbbb"); for (int i = 0; i < hits.length; i++) { @@ -118,15 +118,15 @@ public class TestFuzzyQuery extends LuceneTestCase { // not similar enough: query = new FuzzyQuery(new Term("field", "xxxxx"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new FuzzyQuery(new Term("field", "aaccc"), FuzzyQuery.defaultMaxEdits, 0); // edit distance to "aaaaa" = 3 - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // query identical to a word in the index: query = new FuzzyQuery(new Term("field", "aaaaa"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); // default allows for up to two edits: @@ -135,7 +135,7 @@ public class TestFuzzyQuery extends LuceneTestCase { // query similar to a word in the index: query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); 
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); @@ -143,63 +143,63 @@ public class TestFuzzyQuery extends LuceneTestCase { // now with prefix query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 3); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(2, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); query = new FuzzyQuery(new Term("field", "aaaac"), FuzzyQuery.defaultMaxEdits, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); // now with prefix query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 3); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new FuzzyQuery(new Term("field", "ddddX"), FuzzyQuery.defaultMaxEdits, 5); - hits = 
searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // different field = no match: query = new FuzzyQuery(new Term("anotherfield", "ddddX"), FuzzyQuery.defaultMaxEdits, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); reader.close(); @@ -234,7 +234,7 @@ public class TestFuzzyQuery extends LuceneTestCase { FuzzyQuery query = new FuzzyQuery(new Term("field", "WEBER"), 2, 1); //query.setRewriteMethod(FuzzyQuery.SCORING_BOOLEAN_QUERY_REWRITE); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(8, hits.length); reader.close(); @@ -296,7 +296,7 @@ public class TestFuzzyQuery extends LuceneTestCase { FuzzyQuery query = new FuzzyQuery(new Term("field", "lucene")); query.setRewriteMethod(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(50)); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); // normally, 'Lucenne' would be the first result as IDF will skew the score. assertEquals("Lucene", reader.document(hits[0].doc).get("field")); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java b/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java index 8b6aad44155..cef3e31539c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java @@ -81,10 +81,6 @@ public class TestIndexSearcher extends LuceneTestCase { null, new Sort(new SortField("field2", SortField.Type.STRING)) }; - Filter filters[] = new Filter[] { - null, - new QueryWrapperFilter(new TermQuery(new Term("field2", "true"))) - }; ScoreDoc afters[] = new ScoreDoc[] { null, new FieldDoc(0, 0f, new Object[] { new BytesRef("boo!") }) @@ -94,24 +90,19 @@ public class TestIndexSearcher extends LuceneTestCase { for (ScoreDoc after : afters) { for (Query query : queries) { for (Sort sort : sorts) { - for (Filter filter : filters) { - searcher.search(query, Integer.MAX_VALUE); - searcher.searchAfter(after, query, Integer.MAX_VALUE); - searcher.search(query, filter, Integer.MAX_VALUE); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE); - if (sort != null) { - searcher.search(query, Integer.MAX_VALUE, sort); - searcher.search(query, filter, Integer.MAX_VALUE, sort); - searcher.search(query, filter, Integer.MAX_VALUE, sort, true, true); - searcher.search(query, filter, Integer.MAX_VALUE, sort, true, false); - searcher.search(query, filter, Integer.MAX_VALUE, sort, false, true); - searcher.search(query, filter, Integer.MAX_VALUE, sort, false, false); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, true, true); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, true, false); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, false, true); - searcher.searchAfter(after, query, filter, Integer.MAX_VALUE, sort, false, false); - } + searcher.search(query, Integer.MAX_VALUE); + searcher.searchAfter(after, query, Integer.MAX_VALUE); + if (sort != null) { + searcher.search(query, Integer.MAX_VALUE, sort); + searcher.search(query, Integer.MAX_VALUE, sort, true, true); + searcher.search(query, Integer.MAX_VALUE, sort, true, false); + 
searcher.search(query, Integer.MAX_VALUE, sort, false, true); + searcher.search(query, Integer.MAX_VALUE, sort, false, false); + searcher.searchAfter(after, query, Integer.MAX_VALUE, sort); + searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, true, true); + searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, true, false); + searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, false, true); + searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, false, false); } } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java index f41497ed0e4..681258caa05 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java @@ -54,7 +54,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase { IndexSearcher is = newSearcher(ir); ScoreDoc[] hits; - hits = is.search(new MatchAllDocsQuery(), null, 1000).scoreDocs; + hits = is.search(new MatchAllDocsQuery(), 1000).scoreDocs; assertEquals(3, hits.length); assertEquals("one", is.doc(hits[0].doc).get("key")); assertEquals("two", is.doc(hits[1].doc).get("key")); @@ -65,13 +65,13 @@ public class TestMatchAllDocsQuery extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); - hits = is.search(bq, null, 1000).scoreDocs; + hits = is.search(bq, 1000).scoreDocs; assertEquals(3, hits.length); bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term("key", "three")), BooleanClause.Occur.MUST); - hits = is.search(bq, null, 1000).scoreDocs; + hits = is.search(bq, 1000).scoreDocs; assertEquals(1, hits.length); iw.deleteDocuments(new Term("key", "one")); @@ -79,7 +79,7 @@ public class TestMatchAllDocsQuery extends LuceneTestCase { ir = DirectoryReader.open(iw, true); is = newSearcher(ir); - hits = is.search(new MatchAllDocsQuery(), null, 1000).scoreDocs; + hits = is.search(new MatchAllDocsQuery(), 1000).scoreDocs; assertEquals(2, hits.length); iw.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java index 4e36ad98e55..e2995abe52e 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java @@ -90,9 +90,9 @@ public class TestMultiPhraseQuery extends LuceneTestCase { .toString()); ScoreDoc[] result; - result = searcher.search(query1, null, 1000).scoreDocs; + result = searcher.search(query1, 1000).scoreDocs; assertEquals(2, result.length); - result = searcher.search(query2, null, 1000).scoreDocs; + result = searcher.search(query2, 1000).scoreDocs; assertEquals(0, result.length); // search for "blue* pizza": @@ -110,13 +110,13 @@ public class TestMultiPhraseQuery extends LuceneTestCase { query3.add(termsWithPrefix.toArray(new Term[0])); query3.add(new Term("body", "pizza")); - result = searcher.search(query3, null, 1000).scoreDocs; + result = searcher.search(query3, 1000).scoreDocs; assertEquals(2, result.length); // blueberry pizza, bluebird pizza assertEquals("body:\"(blueberry bluebird) pizza\"", query3.toString()); // test slop: query3.setSlop(1); - result = searcher.search(query3, null, 1000).scoreDocs; + result = searcher.search(query3, 
1000).scoreDocs; // just make sure no exc: searcher.explain(query3, 0); @@ -224,7 +224,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase { q.add(trouble, BooleanClause.Occur.MUST); // exception will be thrown here without fix - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals("Wrong number of hits", 2, hits.length); @@ -256,7 +256,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase { q.add(trouble, BooleanClause.Occur.MUST); // exception will be thrown here without fix for #35626: - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals("Wrong number of hits", 0, hits.length); writer.close(); reader.close(); @@ -275,7 +275,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase { q.add(new Term("body", "a")); q.add(new Term[] {new Term("body", "nope"), new Term("body", "nope")}); assertEquals("Wrong number of hits", 0, - searcher.search(q, null, 1).totalHits); + searcher.search(q, 1).totalHits); // just make sure no exc: searcher.explain(q, 0); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java index 5b2eb4b0662..3bad69713ff 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiTermConstantScore.java @@ -143,7 +143,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { // some hits match more terms then others, score should be the same - result = search.search(csrq("data", "1", "6", T, T), null, 1000).scoreDocs; + result = search.search(csrq("data", "1", "6", T, T), 1000).scoreDocs; int numHits = result.length; assertEquals("wrong number of results", 6, numHits); float score = result[0].score; @@ -152,7 +152,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { result[i].score, SCORE_COMP_THRESH); } - result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), null, 1000).scoreDocs; + result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), 1000).scoreDocs; numHits = result.length; assertEquals("wrong number of results", 6, numHits); for (int i = 0; i < numHits; i++) { @@ -160,7 +160,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { result[i].score, SCORE_COMP_THRESH); } - result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, 1000).scoreDocs; + result = search.search(csrq("data", "1", "6", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), 1000).scoreDocs; numHits = result.length; assertEquals("wrong number of results", 6, numHits); for (int i = 0; i < numHits; i++) { @@ -182,7 +182,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { BooleanQuery bq = new BooleanQuery(); bq.add(dummyTerm, BooleanClause.Occur.SHOULD); // hits one doc bq.add(csrq("data", "#", "#", T, T), BooleanClause.Occur.SHOULD); // hits no docs - result = search.search(bq, null, 1000).scoreDocs; + result = search.search(bq, 1000).scoreDocs; int numHits = result.length; assertEquals("wrong number of results", 1, numHits); float score = result[0].score; @@ -194,7 +194,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { bq = new BooleanQuery(); bq.add(dummyTerm, 
BooleanClause.Occur.SHOULD); // hits one doc bq.add(csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE), BooleanClause.Occur.SHOULD); // hits no docs - result = search.search(bq, null, 1000).scoreDocs; + result = search.search(bq, 1000).scoreDocs; numHits = result.length; assertEquals("wrong number of results", 1, numHits); for (int i = 0; i < numHits; i++) { @@ -205,7 +205,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { bq = new BooleanQuery(); bq.add(dummyTerm, BooleanClause.Occur.SHOULD); // hits one doc bq.add(csrq("data", "#", "#", T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), BooleanClause.Occur.SHOULD); // hits no docs - result = search.search(bq, null, 1000).scoreDocs; + result = search.search(bq, 1000).scoreDocs; numHits = result.length; assertEquals("wrong number of results", 1, numHits); for (int i = 0; i < numHits; i++) { @@ -226,7 +226,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { search.setSimilarity(new DefaultSimilarity()); Query q = csrq("data", "1", "6", T, T); q.setBoost(100); - search.search(q, null, new SimpleCollector() { + search.search(q, new SimpleCollector() { private int base = 0; private Scorer scorer; @Override @@ -259,7 +259,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { bq.add(q1, BooleanClause.Occur.SHOULD); bq.add(q2, BooleanClause.Occur.SHOULD); - ScoreDoc[] hits = search.search(bq, null, 1000).scoreDocs; + ScoreDoc[] hits = search.search(bq, 1000).scoreDocs; Assert.assertEquals(1, hits[0].doc); Assert.assertEquals(0, hits[1].doc); assertTrue(hits[0].score > hits[1].score); @@ -271,7 +271,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { bq.add(q1, BooleanClause.Occur.SHOULD); bq.add(q2, BooleanClause.Occur.SHOULD); - hits = search.search(bq, null, 1000).scoreDocs; + hits = search.search(bq, 1000).scoreDocs; Assert.assertEquals(1, hits[0].doc); Assert.assertEquals(0, hits[1].doc); assertTrue(hits[0].score > hits[1].score); @@ -283,7 +283,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { bq.add(q1, BooleanClause.Occur.SHOULD); bq.add(q2, BooleanClause.Occur.SHOULD); - hits = search.search(bq, null, 1000).scoreDocs; + hits = search.search(bq, 1000).scoreDocs; Assert.assertEquals(0, hits[0].doc); Assert.assertEquals(1, hits[1].doc); assertTrue(hits[0].score > hits[1].score); @@ -300,7 +300,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { Query rq = TermRangeQuery.newStringRange("data", "1", "4", T, T); - ScoreDoc[] expected = search.search(rq, null, 1000).scoreDocs; + ScoreDoc[] expected = search.search(rq, 1000).scoreDocs; int numHits = expected.length; // now do a boolean where which also contains a @@ -310,7 +310,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { q.add(rq, BooleanClause.Occur.MUST);// T, F); q.add(csrq("data", "1", "6", T, T), BooleanClause.Occur.MUST);// T, F); - ScoreDoc[] actual = search.search(q, null, 1000).scoreDocs; + ScoreDoc[] actual = search.search(q, 1000).scoreDocs; assertEquals("wrong numebr of hits", numHits, actual.length); for (int i = 0; i < numHits; i++) { @@ -344,110 +344,110 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { // test id, bounded on both ends - result = search.search(csrq("id", minIP, maxIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, T, T), numDocs).scoreDocs; assertEquals("find all", numDocs, result.length); - result = 
search.search(csrq("id", minIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("find all", numDocs, result.length); - result = search.search(csrq("id", minIP, maxIP, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, T, F), numDocs).scoreDocs; assertEquals("all but last", numDocs - 1, result.length); - result = search.search(csrq("id", minIP, maxIP, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("all but last", numDocs - 1, result.length); - result = search.search(csrq("id", minIP, maxIP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, F, T), numDocs).scoreDocs; assertEquals("all but first", numDocs - 1, result.length); - result = search.search(csrq("id", minIP, maxIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("all but first", numDocs - 1, result.length); - result = search.search(csrq("id", minIP, maxIP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, F, F), numDocs).scoreDocs; assertEquals("all but ends", numDocs - 2, result.length); - result = search.search(csrq("id", minIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("all but ends", numDocs - 2, result.length); - result = search.search(csrq("id", medIP, maxIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, maxIP, T, T), numDocs).scoreDocs; assertEquals("med and up", 1 + maxId - medId, result.length); - result = search.search(csrq("id", medIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("med and up", 1 + maxId - medId, result.length); - result = search.search(csrq("id", minIP, medIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, medIP, T, T), numDocs).scoreDocs; assertEquals("up to med", 1 + medId - minId, result.length); - result = search.search(csrq("id", minIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("up to med", 1 + medId - minId, result.length); // unbounded id - result = search.search(csrq("id", minIP, null, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, null, T, F), numDocs).scoreDocs; assertEquals("min and up", numDocs, result.length); - result = search.search(csrq("id", null, maxIP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", null, maxIP, F, T), numDocs).scoreDocs; assertEquals("max and down", numDocs, result.length); - result = search.search(csrq("id", minIP, null, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, null, F, F), numDocs).scoreDocs; 
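The sorting, paging and Collector overloads touched in the TestCustomSearcherSort, TestDateSort, TestElevationComparator, TestIndexSearcher and TestMultiTermConstantScore hunks follow the same pattern: the Filter parameter disappears from the sorted search, searchAfter and Collector-based entry points, so callers either drop the null or wrap the query in FilteredQuery. A sketch under the same assumptions as above (illustrative names, trunk API at the time of the change):

import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHitCountCollector;

/** Illustrative only: the sorted, paged and Collector-based call shapes after the removal. */
class SortedSearchMigrationSketch {

  static void run(IndexReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = new TermQuery(new Term("field", "value"));          // hypothetical
    Filter filter = new QueryWrapperFilter(
        new TermQuery(new Term("category", "books")));                // hypothetical
    Sort sort = new Sort(new SortField("sorter", SortField.Type.STRING));

    // Old form: searcher.search(query, null, 1000, sort)
    TopFieldDocs sorted = searcher.search(query, 1000, sort);

    // Old form: searcher.search(query, filter, 1000, sort)
    TopFieldDocs sortedFiltered = searcher.search(new FilteredQuery(query, filter), 1000, sort);

    // Old form: searcher.searchAfter(after, query, filter, 10)
    TopDocs firstPage = searcher.search(new FilteredQuery(query, filter), 10);
    ScoreDoc after = firstPage.scoreDocs.length == 0
        ? null : firstPage.scoreDocs[firstPage.scoreDocs.length - 1];
    TopDocs nextPage = searcher.searchAfter(after, new FilteredQuery(query, filter), 10);

    // Old form: searcher.search(query, filter, collector)
    TotalHitCountCollector collector = new TotalHitCountCollector();
    searcher.search(new FilteredQuery(query, filter), collector);

    System.out.println(sorted.totalHits + " " + sortedFiltered.totalHits + " "
        + nextPage.totalHits + " " + collector.getTotalHits());
  }
}

The paging step deliberately uses the unsorted searchAfter overload; the sorted variant expects the after document to be a FieldDoc, which this sketch does not set up.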
assertEquals("not min, but up", numDocs - 1, result.length); - result = search.search(csrq("id", null, maxIP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", null, maxIP, F, F), numDocs).scoreDocs; assertEquals("not max, but down", numDocs - 1, result.length); - result = search.search(csrq("id", medIP, maxIP, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, maxIP, T, F), numDocs).scoreDocs; assertEquals("med and up, not max", maxId - medId, result.length); - result = search.search(csrq("id", minIP, medIP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, medIP, F, T), numDocs).scoreDocs; assertEquals("not min, up to med", medId - minId, result.length); // very small sets - result = search.search(csrq("id", minIP, minIP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, minIP, F, F), numDocs).scoreDocs; assertEquals("min,min,F,F", 0, result.length); - result = search.search(csrq("id", minIP, minIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, minIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("min,min,F,F", 0, result.length); - result = search.search(csrq("id", medIP, medIP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, medIP, F, F), numDocs).scoreDocs; assertEquals("med,med,F,F", 0, result.length); - result = search.search(csrq("id", medIP, medIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, medIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("med,med,F,F", 0, result.length); - result = search.search(csrq("id", maxIP, maxIP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, maxIP, F, F), numDocs).scoreDocs; assertEquals("max,max,F,F", 0, result.length); - result = search.search(csrq("id", maxIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, maxIP, F, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("max,max,F,F", 0, result.length); - result = search.search(csrq("id", minIP, minIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, minIP, T, T), numDocs).scoreDocs; assertEquals("min,min,T,T", 1, result.length); - result = search.search(csrq("id", minIP, minIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", minIP, minIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("min,min,T,T", 1, result.length); - result = search.search(csrq("id", null, minIP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", null, minIP, F, T), numDocs).scoreDocs; assertEquals("nul,min,F,T", 1, result.length); - result = search.search(csrq("id", null, minIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", null, minIP, F, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("nul,min,F,T", 1, result.length); - result = search.search(csrq("id", maxIP, maxIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, maxIP, T, T), numDocs).scoreDocs; assertEquals("max,max,T,T", 1, result.length); - result = search.search(csrq("id", maxIP, maxIP, T, T, 
MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, maxIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("max,max,T,T", 1, result.length); - result = search.search(csrq("id", maxIP, null, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, null, T, F), numDocs).scoreDocs; assertEquals("max,nul,T,T", 1, result.length); - result = search.search(csrq("id", maxIP, null, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", maxIP, null, T, F, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("max,nul,T,T", 1, result.length); - result = search.search(csrq("id", medIP, medIP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, medIP, T, T), numDocs).scoreDocs; assertEquals("med,med,T,T", 1, result.length); - result = search.search(csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), null, numDocs).scoreDocs; + result = search.search(csrq("id", medIP, medIP, T, T, MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE), numDocs).scoreDocs; assertEquals("med,med,T,T", 1, result.length); } @@ -469,47 +469,47 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter { // test extremes, bounded on both ends - result = search.search(csrq("rand", minRP, maxRP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, maxRP, T, T), numDocs).scoreDocs; assertEquals("find all", numDocs, result.length); - result = search.search(csrq("rand", minRP, maxRP, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, maxRP, T, F), numDocs).scoreDocs; assertEquals("all but biggest", numDocs - 1, result.length); - result = search.search(csrq("rand", minRP, maxRP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, maxRP, F, T), numDocs).scoreDocs; assertEquals("all but smallest", numDocs - 1, result.length); - result = search.search(csrq("rand", minRP, maxRP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, maxRP, F, F), numDocs).scoreDocs; assertEquals("all but extremes", numDocs - 2, result.length); // unbounded - result = search.search(csrq("rand", minRP, null, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, null, T, F), numDocs).scoreDocs; assertEquals("smallest and up", numDocs, result.length); - result = search.search(csrq("rand", null, maxRP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", null, maxRP, F, T), numDocs).scoreDocs; assertEquals("biggest and down", numDocs, result.length); - result = search.search(csrq("rand", minRP, null, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, null, F, F), numDocs).scoreDocs; assertEquals("not smallest, but up", numDocs - 1, result.length); - result = search.search(csrq("rand", null, maxRP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", null, maxRP, F, F), numDocs).scoreDocs; assertEquals("not biggest, but down", numDocs - 1, result.length); // very small sets - result = search.search(csrq("rand", minRP, minRP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, minRP, F, F), numDocs).scoreDocs; assertEquals("min,min,F,F", 0, result.length); - result = search.search(csrq("rand", maxRP, maxRP, F, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", maxRP, maxRP, F, 
F), numDocs).scoreDocs; assertEquals("max,max,F,F", 0, result.length); - result = search.search(csrq("rand", minRP, minRP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", minRP, minRP, T, T), numDocs).scoreDocs; assertEquals("min,min,T,T", 1, result.length); - result = search.search(csrq("rand", null, minRP, F, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", null, minRP, F, T), numDocs).scoreDocs; assertEquals("nul,min,F,T", 1, result.length); - result = search.search(csrq("rand", maxRP, maxRP, T, T), null, numDocs).scoreDocs; + result = search.search(csrq("rand", maxRP, maxRP, T, T), numDocs).scoreDocs; assertEquals("max,max,T,T", 1, result.length); - result = search.search(csrq("rand", maxRP, null, T, F), null, numDocs).scoreDocs; + result = search.search(csrq("rand", maxRP, null, T, F), numDocs).scoreDocs; assertEquals("max,nul,T,T", 1, result.length); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java index 67a64d94694..91e6ee1f9c7 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java @@ -80,7 +80,7 @@ public class TestNeedsScores extends LuceneTestCase { Query query = new MatchAllDocsQuery(); Query term = new TermQuery(new Term("field", "this")); Filter filter = new QueryWrapperFilter(new AssertNeedsScores(term, false)); - assertEquals(5, searcher.search(query, filter, 5).totalHits); + assertEquals(5, searcher.search(new FilteredQuery(query, filter), 5).totalHits); } /** when not sorting by score */ diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNot.java b/lucene/core/src/test/org/apache/lucene/search/TestNot.java index a7591b32ecb..1642f03f928 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNot.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNot.java @@ -48,7 +48,7 @@ public class TestNot extends LuceneTestCase { query.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("field", "b")), BooleanClause.Occur.MUST_NOT); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); writer.close(); reader.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java index 0782e427ed3..f33fb034e0b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery32.java @@ -158,16 +158,16 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { case 0: type = " (constant score filter rewrite)"; q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); break; case 1: type = " (constant score boolean rewrite)"; q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); break; case 2: type = " (filter)"; - topDocs = searcher.search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), noDocs, Sort.INDEXORDER); break; default: 
return; @@ -222,7 +222,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { int count=3000; int upper=(count-1)*distance + (distance/3) + startOffset; NumericRangeQuery q=NumericRangeQuery.newIntRange(field, precisionStep, null, upper, true, true); - TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", count, sd.length ); @@ -232,7 +232,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().intValue()); q=NumericRangeQuery.newIntRange(field, precisionStep, null, upper, false, true); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", count, sd.length ); @@ -262,7 +262,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { int count=3000; int lower=(count-1)*distance + (distance/3) +startOffset; NumericRangeQuery q=NumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, true); - TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", noDocs-count, sd.length ); @@ -272,7 +272,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().intValue()); q=NumericRangeQuery.newIntRange(field, precisionStep, lower, null, true, false); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", noDocs-count, sd.length ); @@ -550,7 +550,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase { Filter tf=NumericRangeFilter.newFloatRange(field, precisionStep, NumericUtils.sortableIntToFloat(lower), NumericUtils.sortableIntToFloat(upper), true, true); - tTopDocs = searcher.search(new MatchAllDocsQuery(), tf, 1); + tTopDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), tf), 1); assertEquals("Returned count of range filter must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits ); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java index 42dd5188c65..7f4d3b7cd3d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNumericRangeQuery64.java @@ -167,16 +167,16 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { case 0: type = " (constant score filter rewrite)"; q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); break; case 1: type = " (constant score boolean rewrite)"; q.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); break; case 2: type = " (filter)"; - topDocs = searcher.search(new MatchAllDocsQuery(), f, noDocs, Sort.INDEXORDER); + 
topDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), f), noDocs, Sort.INDEXORDER); break; default: return; @@ -239,7 +239,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { int count=3000; long upper=(count-1)*distance + (distance/3) + startOffset; NumericRangeQuery q=NumericRangeQuery.newLongRange(field, precisionStep, null, upper, true, true); - TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", count, sd.length ); @@ -249,7 +249,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { assertEquals("Last doc", (count-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); q=NumericRangeQuery.newLongRange(field, precisionStep, null, upper, false, true); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", count, sd.length ); @@ -284,7 +284,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { int count=3000; long lower=(count-1)*distance + (distance/3) +startOffset; NumericRangeQuery q=NumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, true); - TopDocs topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", noDocs-count, sd.length ); @@ -294,7 +294,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { assertEquals("Last doc", (noDocs-1)*distance+startOffset, doc.getField(field).numericValue().longValue() ); q=NumericRangeQuery.newLongRange(field, precisionStep, lower, null, true, false); - topDocs = searcher.search(q, null, noDocs, Sort.INDEXORDER); + topDocs = searcher.search(q, noDocs, Sort.INDEXORDER); sd = topDocs.scoreDocs; assertNotNull(sd); assertEquals("Score doc count", noDocs-count, sd.length ); @@ -587,7 +587,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase { Filter tf=NumericRangeFilter.newDoubleRange(field, precisionStep, NumericUtils.sortableLongToDouble(lower), NumericUtils.sortableLongToDouble(upper), true, true); - tTopDocs = searcher.search(new MatchAllDocsQuery(), tf, 1); + tTopDocs = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), tf), 1); assertEquals("Returned count of range filter must be equal to inclusive range length", upper-lower+1, tTopDocs.totalHits ); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java index 3de48e9addd..aacdec2a3d4 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPhrasePrefixQuery.java @@ -88,10 +88,10 @@ public class TestPhrasePrefixQuery extends LuceneTestCase { query2.add(termsWithPrefix.toArray(new Term[0])); ScoreDoc[] result; - result = searcher.search(query1, null, 1000).scoreDocs; + result = searcher.search(query1, 1000).scoreDocs; assertEquals(2, result.length); - result = searcher.search(query2, null, 1000).scoreDocs; + result = searcher.search(query2, 1000).scoreDocs; assertEquals(0, result.length); reader.close(); indexStore.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java 
b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java index 24c08676eed..f701d54037b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPhraseQuery.java @@ -113,7 +113,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(2); query.add(new Term("field", "one")); query.add(new Term("field", "five")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); QueryUtils.check(random(), query,searcher); } @@ -122,7 +122,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(3); query.add(new Term("field", "one")); query.add(new Term("field", "five")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); QueryUtils.check(random(), query,searcher); } @@ -134,7 +134,7 @@ public class TestPhraseQuery extends LuceneTestCase { // slop is zero by default query.add(new Term("field", "four")); query.add(new Term("field", "five")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("exact match", 1, hits.length); QueryUtils.check(random(), query,searcher); @@ -142,7 +142,7 @@ public class TestPhraseQuery extends LuceneTestCase { query = new PhraseQuery(); query.add(new Term("field", "two")); query.add(new Term("field", "one")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("reverse not exact", 0, hits.length); QueryUtils.check(random(), query,searcher); } @@ -152,7 +152,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(1); query.add(new Term("field", "one")); query.add(new Term("field", "two")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("in order", 1, hits.length); QueryUtils.check(random(), query,searcher); @@ -163,7 +163,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(1); query.add(new Term("field", "two")); query.add(new Term("field", "one")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("reversed, slop not 2 or more", 0, hits.length); QueryUtils.check(random(), query,searcher); } @@ -175,7 +175,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(2); // must be at least two for reverse order match query.add(new Term("field", "two")); query.add(new Term("field", "one")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); QueryUtils.check(random(), query,searcher); @@ -184,7 +184,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(2); query.add(new Term("field", "three")); query.add(new Term("field", "one")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("not sloppy enough", 0, hits.length); QueryUtils.check(random(), query,searcher); @@ -199,7 +199,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("field", "one")); query.add(new Term("field", "three")); query.add(new Term("field", "five")); - ScoreDoc[] hits = searcher.search(query, null, 
1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("two total moves", 1, hits.length); QueryUtils.check(random(), query,searcher); @@ -209,13 +209,13 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("field", "five")); query.add(new Term("field", "three")); query.add(new Term("field", "one")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("slop of 5 not close enough", 0, hits.length); QueryUtils.check(random(), query,searcher); query.setSlop(6); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("slop of 6 just right", 1, hits.length); QueryUtils.check(random(), query,searcher); @@ -238,7 +238,7 @@ public class TestPhraseQuery extends LuceneTestCase { PhraseQuery query = new PhraseQuery(); query.add(new Term("field","stop")); query.add(new Term("field","words")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); QueryUtils.check(random(), query,searcher); @@ -267,7 +267,7 @@ public class TestPhraseQuery extends LuceneTestCase { PhraseQuery phraseQuery = new PhraseQuery(); phraseQuery.add(new Term("source", "marketing")); phraseQuery.add(new Term("source", "info")); - ScoreDoc[] hits = searcher.search(phraseQuery, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(phraseQuery, 1000).scoreDocs; assertEquals(2, hits.length); QueryUtils.check(random(), phraseQuery,searcher); @@ -276,7 +276,7 @@ public class TestPhraseQuery extends LuceneTestCase { BooleanQuery booleanQuery = new BooleanQuery(); booleanQuery.add(termQuery, BooleanClause.Occur.MUST); booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST); - hits = searcher.search(booleanQuery, null, 1000).scoreDocs; + hits = searcher.search(booleanQuery, 1000).scoreDocs; assertEquals(1, hits.length); QueryUtils.check(random(), termQuery,searcher); @@ -307,22 +307,22 @@ public class TestPhraseQuery extends LuceneTestCase { phraseQuery.add(new Term("contents","map")); phraseQuery.add(new Term("contents","entry")); - hits = searcher.search(termQuery, null, 1000).scoreDocs; + hits = searcher.search(termQuery, 1000).scoreDocs; assertEquals(3, hits.length); - hits = searcher.search(phraseQuery, null, 1000).scoreDocs; + hits = searcher.search(phraseQuery, 1000).scoreDocs; assertEquals(2, hits.length); booleanQuery = new BooleanQuery(); booleanQuery.add(termQuery, BooleanClause.Occur.MUST); booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST); - hits = searcher.search(booleanQuery, null, 1000).scoreDocs; + hits = searcher.search(booleanQuery, 1000).scoreDocs; assertEquals(2, hits.length); booleanQuery = new BooleanQuery(); booleanQuery.add(phraseQuery, BooleanClause.Occur.MUST); booleanQuery.add(termQuery, BooleanClause.Occur.MUST); - hits = searcher.search(booleanQuery, null, 1000).scoreDocs; + hits = searcher.search(booleanQuery, 1000).scoreDocs; assertEquals(2, hits.length); QueryUtils.check(random(), booleanQuery,searcher); @@ -359,7 +359,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("field", "firstname")); query.add(new Term("field", "lastname")); query.setSlop(Integer.MAX_VALUE); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); // Make sure that those matches where the terms appear closer to // 
each other get a higher score: @@ -407,13 +407,13 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("repeated", "part")); query.setSlop(100); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("slop of 100 just right", 1, hits.length); QueryUtils.check(random(), query,searcher); query.setSlop(99); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("slop of 99 not enough", 0, hits.length); QueryUtils.check(random(), query,searcher); } @@ -426,7 +426,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("nonexist", "found")); query.setSlop(2); // would be found this way - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("phrase without repetitions exists in 2 docs", 2, hits.length); QueryUtils.check(random(), query,searcher); @@ -437,7 +437,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("nonexist", "exist")); query.setSlop(1); // would be found - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("phrase with repetitions exists in two docs", 2, hits.length); QueryUtils.check(random(), query,searcher); @@ -448,7 +448,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("nonexist", "phrase")); query.setSlop(1000); // would not be found no matter how high the slop is - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length); QueryUtils.check(random(), query,searcher); @@ -460,7 +460,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("nonexist", "exist")); query.setSlop(1000); // would not be found no matter how high the slop is - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("nonexisting phrase with repetitions does not exist in any doc", 0, hits.length); QueryUtils.check(random(), query,searcher); @@ -481,7 +481,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(0); // to use exact phrase scorer query.add(new Term("field", "two")); query.add(new Term("field", "three")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("phrase found with exact phrase scorer", 1, hits.length); float score0 = hits[0].score; //System.out.println("(exact) field: two three: "+score0); @@ -489,7 +489,7 @@ public class TestPhraseQuery extends LuceneTestCase { // search on non palyndrome, find phrase with slop 2, though no slop required here. 
query.setSlop(2); // to use sloppy scorer - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); float score1 = hits[0].score; //System.out.println("(sloppy) field: two three: "+score1); @@ -501,7 +501,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(2); // must be at least two for both ordered and reversed to match query.add(new Term("palindrome", "two")); query.add(new Term("palindrome", "three")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); //float score2 = hits[0].score; //System.out.println("palindrome: two three: "+score2); @@ -515,7 +515,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.setSlop(2); // must be at least two for both ordered and reversed to match query.add(new Term("palindrome", "three")); query.add(new Term("palindrome", "two")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); //float score3 = hits[0].score; //System.out.println("palindrome: three two: "+score3); @@ -542,7 +542,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("field", "one")); query.add(new Term("field", "two")); query.add(new Term("field", "three")); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("phrase found with exact phrase scorer", 1, hits.length); float score0 = hits[0].score; //System.out.println("(exact) field: one two three: "+score0); @@ -553,7 +553,7 @@ public class TestPhraseQuery extends LuceneTestCase { // search on non palyndrome, find phrase with slop 3, though no slop required here. 
query.setSlop(4); // to use sloppy scorer - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); float score1 = hits[0].score; //System.out.println("(sloppy) field: one two three: "+score1); @@ -566,7 +566,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("palindrome", "one")); query.add(new Term("palindrome", "two")); query.add(new Term("palindrome", "three")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; // just make sure no exc: searcher.explain(query, 0); @@ -585,7 +585,7 @@ public class TestPhraseQuery extends LuceneTestCase { query.add(new Term("palindrome", "three")); query.add(new Term("palindrome", "two")); query.add(new Term("palindrome", "one")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("just sloppy enough", 1, hits.length); //float score3 = hits[0].score; //System.out.println("palindrome: three two one: "+score3); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java index 1e0ed480634..710827325d1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPositionIncrement.java @@ -122,40 +122,40 @@ public class TestPositionIncrement extends LuceneTestCase { q = new PhraseQuery(); q.add(new Term("field", "1")); q.add(new Term("field", "2")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); // same as previous, just specify positions explicitely. q = new PhraseQuery(); q.add(new Term("field", "1"),0); q.add(new Term("field", "2"),1); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); // specifying correct positions should find the phrase. q = new PhraseQuery(); q.add(new Term("field", "1"),0); q.add(new Term("field", "2"),2); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "2")); q.add(new Term("field", "3")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "3")); q.add(new Term("field", "4")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); // phrase query would find it when correct positions are specified. q = new PhraseQuery(); q.add(new Term("field", "3"),0); q.add(new Term("field", "4"),0); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); // phrase query should fail for non existing searched term @@ -163,38 +163,38 @@ public class TestPositionIncrement extends LuceneTestCase { q = new PhraseQuery(); q.add(new Term("field", "3"),0); q.add(new Term("field", "9"),0); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); // multi-phrase query should succed for non existing searched term // because there exist another searched terms in the same searched position. 
MultiPhraseQuery mq = new MultiPhraseQuery(); mq.add(new Term[]{new Term("field", "3"),new Term("field", "9")},0); - hits = searcher.search(mq, null, 1000).scoreDocs; + hits = searcher.search(mq, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "2")); q.add(new Term("field", "4")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "3")); q.add(new Term("field", "5")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "4")); q.add(new Term("field", "5")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(1, hits.length); q = new PhraseQuery(); q.add(new Term("field", "2")); q.add(new Term("field", "5")); - hits = searcher.search(q, null, 1000).scoreDocs; + hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); reader.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPrefixFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestPrefixFilter.java index b41b6ddb35c..4fb5c3fb385 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPrefixFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPrefixFilter.java @@ -49,55 +49,55 @@ public class TestPrefixFilter extends LuceneTestCase { PrefixFilter filter = new PrefixFilter(new Term("category", "/Computers")); Query query = new ConstantScoreQuery(filter); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(4, hits.length); // test middle of values filter = new PrefixFilter(new Term("category", "/Computers/Mac")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(2, hits.length); // test start of values filter = new PrefixFilter(new Term("category", "/Computers/Linux")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // test end of values filter = new PrefixFilter(new Term("category", "/Computers/Windows")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // test non-existant filter = new PrefixFilter(new Term("category", "/Computers/ObsoleteOS")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // test non-existant, before values filter = new PrefixFilter(new Term("category", "/Computers/AAA")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // test non-existant, after values filter = new PrefixFilter(new Term("category", "/Computers/ZZZ")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // test zero length prefix filter = new PrefixFilter(new Term("category", "")); query = new 
ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(4, hits.length); // test non existent field filter = new PrefixFilter(new Term("nonexistantfield", "/Computers")); query = new ConstantScoreQuery(filter); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); writer.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPrefixInBooleanQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPrefixInBooleanQuery.java index b8a0e11becc..0f81b6cdae8 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPrefixInBooleanQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPrefixInBooleanQuery.java @@ -85,12 +85,12 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase { public void testPrefixQuery() throws Exception { Query query = new PrefixQuery(new Term(FIELD, "tang")); assertEquals("Number of matched documents", 2, - searcher.search(query, null, 1000).totalHits); + searcher.search(query, 1000).totalHits); } public void testTermQuery() throws Exception { Query query = new TermQuery(new Term(FIELD, "tangfulin")); assertEquals("Number of matched documents", 2, - searcher.search(query, null, 1000).totalHits); + searcher.search(query, 1000).totalHits); } public void testTermBooleanQuery() throws Exception { BooleanQuery query = new BooleanQuery(); @@ -99,7 +99,7 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase { query.add(new TermQuery(new Term(FIELD, "notexistnames")), BooleanClause.Occur.SHOULD); assertEquals("Number of matched documents", 2, - searcher.search(query, null, 1000).totalHits); + searcher.search(query, 1000).totalHits); } public void testPrefixBooleanQuery() throws Exception { @@ -109,6 +109,6 @@ public class TestPrefixInBooleanQuery extends LuceneTestCase { query.add(new TermQuery(new Term(FIELD, "notexistnames")), BooleanClause.Occur.SHOULD); assertEquals("Number of matched documents", 2, - searcher.search(query, null, 1000).totalHits); + searcher.search(query, 1000).totalHits); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPrefixQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestPrefixQuery.java index 06f0595cadb..8869f7245e5 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPrefixQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPrefixQuery.java @@ -48,17 +48,17 @@ public class TestPrefixQuery extends LuceneTestCase { PrefixQuery query = new PrefixQuery(new Term("category", "/Computers")); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("All documents in /Computers category and below", 3, hits.length); query = new PrefixQuery(new Term("category", "/Computers/Mac")); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("One in /Computers/Mac", 1, hits.length); query = new PrefixQuery(new Term("category", "")); Terms terms = MultiFields.getTerms(searcher.getIndexReader(), "category"); assertFalse(query.getTermsEnum(terms) instanceof PrefixTermsEnum); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("everything", 3, hits.length); writer.close(); reader.close(); diff --git 
a/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java index d727aec2d32..8853af97d0e 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java @@ -46,9 +46,9 @@ public class TestQueryWrapperFilter extends LuceneTestCase { QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery); IndexSearcher searcher = newSearcher(reader); - TopDocs hits = searcher.search(new MatchAllDocsQuery(), qwf, 10); + TopDocs hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10); assertEquals(1, hits.totalHits); - hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10); assertEquals(1, hits.totalHits); // should not throw exception with complex primitive query @@ -58,26 +58,26 @@ public class TestQueryWrapperFilter extends LuceneTestCase { Occur.MUST_NOT); qwf = new QueryWrapperFilter(termQuery); - hits = searcher.search(new MatchAllDocsQuery(), qwf, 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10); assertEquals(1, hits.totalHits); - hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10); assertEquals(1, hits.totalHits); // should not throw exception with non primitive Query (doesn't implement // Query#createWeight) qwf = new QueryWrapperFilter(new FuzzyQuery(new Term("field", "valu"))); - hits = searcher.search(new MatchAllDocsQuery(), qwf, 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10); assertEquals(1, hits.totalHits); - hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10); assertEquals(1, hits.totalHits); // test a query with no hits termQuery = new TermQuery(new Term("field", "not_exist")); qwf = new QueryWrapperFilter(termQuery); - hits = searcher.search(new MatchAllDocsQuery(), qwf, 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), qwf), 10); assertEquals(0, hits.totalHits); - hits = searcher.search(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf), 10); + hits = searcher.search(new FilteredQuery(new MatchAllDocsQuery(), new CachingWrapperFilter(qwf)), 10); assertEquals(0, hits.totalHits); reader.close(); dir.close(); @@ -113,8 +113,8 @@ public class TestQueryWrapperFilter extends LuceneTestCase { final IndexReader r = w.getReader(); w.close(); - final TopDocs hits = newSearcher(r).search(new MatchAllDocsQuery(), - new QueryWrapperFilter(new TermQuery(new Term("field", "a"))), + final TopDocs hits = newSearcher(r).search(new FilteredQuery(new MatchAllDocsQuery(), + new QueryWrapperFilter(new TermQuery(new Term("field", "a")))), numDocs); assertEquals(aDocs.size(), hits.totalHits); for(ScoreDoc sd: hits.scoreDocs) { @@ -141,7 +141,7 @@ public class TestQueryWrapperFilter extends LuceneTestCase { for (int i = 0; i < 1000; i++) { TermQuery termQuery = new TermQuery(new Term("field", English.intToEnglish(i))); QueryWrapperFilter qwf = new QueryWrapperFilter(termQuery); - TopDocs td = searcher.search(new MatchAllDocsQuery(), qwf, 10); + TopDocs td = searcher.search(new FilteredQuery(new 
MatchAllDocsQuery(), qwf), 10); assertEquals(1, td.totalHits); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java b/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java index b2029a9cbb4..d1aff6cdec9 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSearchAfter.java @@ -180,11 +180,8 @@ public class TestSearchAfter extends LuceneTestCase { // pages. int n = atLeast(20); for (int i = 0; i < n; i++) { - Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd"))); assertQuery(new MatchAllDocsQuery(), null); assertQuery(new TermQuery(new Term("english", "one")), null); - assertQuery(new MatchAllDocsQuery(), odd); - assertQuery(new TermQuery(new Term("english", "four")), odd); BooleanQuery bq = new BooleanQuery(); bq.add(new TermQuery(new Term("english", "one")), BooleanClause.Occur.SHOULD); bq.add(new TermQuery(new Term("oddeven", "even")), BooleanClause.Occur.SHOULD); @@ -192,15 +189,15 @@ public class TestSearchAfter extends LuceneTestCase { } } - void assertQuery(Query query, Filter filter) throws Exception { - assertQuery(query, filter, null); - assertQuery(query, filter, Sort.RELEVANCE); - assertQuery(query, filter, Sort.INDEXORDER); + void assertQuery(Query query) throws Exception { + assertQuery(query, null); + assertQuery(query, Sort.RELEVANCE); + assertQuery(query, Sort.INDEXORDER); for(SortField sortField : allSortFields) { - assertQuery(query, filter, new Sort(new SortField[] {sortField})); + assertQuery(query, new Sort(new SortField[] {sortField})); } for(int i=0;i<20;i++) { - assertQuery(query, filter, getRandomSort()); + assertQuery(query, getRandomSort()); } } @@ -212,21 +209,21 @@ public class TestSearchAfter extends LuceneTestCase { return new Sort(sortFields); } - void assertQuery(Query query, Filter filter, Sort sort) throws Exception { + void assertQuery(Query query, Sort sort) throws Exception { int maxDoc = searcher.getIndexReader().maxDoc(); TopDocs all; int pageSize = TestUtil.nextInt(random(), 1, maxDoc * 2); if (VERBOSE) { - System.out.println("\nassertQuery " + (iter++) + ": query=" + query + " filter=" + filter + " sort=" + sort + " pageSize=" + pageSize); + System.out.println("\nassertQuery " + (iter++) + ": query=" + query + " sort=" + sort + " pageSize=" + pageSize); } final boolean doMaxScore = random().nextBoolean(); final boolean doScores = random().nextBoolean(); if (sort == null) { - all = searcher.search(query, filter, maxDoc); + all = searcher.search(query, maxDoc); } else if (sort == Sort.RELEVANCE) { - all = searcher.search(query, filter, maxDoc, sort, true, doMaxScore); + all = searcher.search(query, maxDoc, sort, true, doMaxScore); } else { - all = searcher.search(query, filter, maxDoc, sort, doScores, doMaxScore); + all = searcher.search(query, maxDoc, sort, doScores, doMaxScore); } if (VERBOSE) { System.out.println(" all.totalHits=" + all.totalHits); @@ -243,15 +240,15 @@ public class TestSearchAfter extends LuceneTestCase { if (VERBOSE) { System.out.println(" iter lastBottom=" + lastBottom); } - paged = searcher.searchAfter(lastBottom, query, filter, pageSize); + paged = searcher.searchAfter(lastBottom, query, pageSize); } else { if (VERBOSE) { System.out.println(" iter lastBottom=" + lastBottom); } if (sort == Sort.RELEVANCE) { - paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort, true, doMaxScore); + paged = searcher.searchAfter(lastBottom, query, pageSize, sort, true, 
doMaxScore); } else { - paged = searcher.searchAfter(lastBottom, query, filter, pageSize, sort, doScores, doMaxScore); + paged = searcher.searchAfter(lastBottom, query, pageSize, sort, doScores, doMaxScore); } } if (VERBOSE) { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java b/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java index afc44efc578..f6fc83d75c3 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSortRandom.java @@ -148,7 +148,7 @@ public class TestSortRandom extends LuceneTestCase { } final int hitCount = TestUtil.nextInt(random, 1, r.maxDoc() + 20); final RandomFilter f = new RandomFilter(random, random.nextFloat(), docValues); - int queryType = random.nextInt(3); + int queryType = random.nextInt(2); if (queryType == 0) { // force out of order BooleanQuery bq = new BooleanQuery(); @@ -158,13 +158,10 @@ public class TestSortRandom extends LuceneTestCase { // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return // the clause instead of BQ. bq.setMinimumNumberShouldMatch(1); - hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean()); - } else if (queryType == 1) { - hits = s.search(new ConstantScoreQuery(f), - null, hitCount, sort, random.nextBoolean(), random.nextBoolean()); + hits = s.search(new FilteredQuery(bq, f), hitCount, sort, random.nextBoolean(), random.nextBoolean()); } else { - hits = s.search(new MatchAllDocsQuery(), - f, hitCount, sort, random.nextBoolean(), random.nextBoolean()); + hits = s.search(new ConstantScoreQuery(f), + hitCount, sort, random.nextBoolean(), random.nextBoolean()); } if (VERBOSE) { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java b/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java index 9d555e86cee..2a9d63e0f6b 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSortedNumericSortField.java @@ -37,13 +37,13 @@ public class TestSortedNumericSortField extends LuceneTestCase { Sort sort = new Sort(); sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG)); - TopDocs td = empty.search(query, null, 10, sort, true, true); + TopDocs td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); // for an empty index, any selector should work for (SortedNumericSelector.Type v : SortedNumericSelector.Type.values()) { sort.setSort(new SortedNumericSortField("sortednumeric", SortField.Type.LONG, false, v)); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java b/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java index 26b62215f67..0b46e894916 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSortedSetSortField.java @@ -37,13 +37,13 @@ public class TestSortedSetSortField extends LuceneTestCase { Sort sort = new Sort(); sort.setSort(new SortedSetSortField("sortedset", false)); - TopDocs td = empty.search(query, null, 10, sort, true, true); + TopDocs td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); // for an empty index, any selector should work for (SortedSetSelector.Type 
v : SortedSetSelector.Type.values()) { sort.setSort(new SortedSetSortField("sortedset", false, v)); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); } } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java b/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java index 3a448d1e73b..e5315f96a57 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestSubScorerFreqs.java @@ -124,7 +124,7 @@ public class TestSubScorerFreqs extends LuceneTestCase { public void testTermQuery() throws Exception { TermQuery q = new TermQuery(new Term("f", "d")); CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10)); - s.search(q, null, c); + s.search(q, c); final int maxDocs = s.getIndexReader().maxDoc(); assertEquals(maxDocs, c.docCounts.size()); for (int i = 0; i < maxDocs; i++) { @@ -164,7 +164,7 @@ public class TestSubScorerFreqs extends LuceneTestCase { for (final Set occur : occurList) { CountingCollector c = new CountingCollector(TopScoreDocCollector.create( 10), occur); - s.search(query, null, c); + s.search(query, c); final int maxDocs = s.getIndexReader().maxDoc(); assertEquals(maxDocs, c.docCounts.size()); boolean includeOptional = occur.contains("SHOULD"); @@ -196,7 +196,7 @@ public class TestSubScorerFreqs extends LuceneTestCase { q.add(new Term("f", "b")); q.add(new Term("f", "c")); CountingCollector c = new CountingCollector(TopScoreDocCollector.create(10)); - s.search(q, null, c); + s.search(q, c); final int maxDocs = s.getIndexReader().maxDoc(); assertEquals(maxDocs, c.docCounts.size()); for (int i = 0; i < maxDocs; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermRangeFilter.java b/lucene/core/src/test/org/apache/lucene/search/TestTermRangeFilter.java index ff57a85d56f..4910c927d48 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermRangeFilter.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermRangeFilter.java @@ -54,83 +54,83 @@ public class TestTermRangeFilter extends BaseTestRangeFilter { // test id, bounded on both ends - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, T)), numDocs).scoreDocs; assertEquals("find all", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, T, F)), numDocs).scoreDocs; assertEquals("all but last", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, T)), numDocs).scoreDocs; assertEquals("all but first", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, maxIP, F, F)), numDocs).scoreDocs; assertEquals("all but ends", numDocs - 2, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, T)), numDocs).scoreDocs; assertEquals("med and up", 1 + 
maxId - medId, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, medIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, medIP, T, T)), numDocs).scoreDocs; assertEquals("up to med", 1 + medId - minId, result.length); // unbounded id - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, null, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, null, T, F)), numDocs).scoreDocs; assertEquals("min and up", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", null, maxIP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, maxIP, F, T)), numDocs).scoreDocs; assertEquals("max and down", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, null, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, null, F, F)), numDocs).scoreDocs; assertEquals("not min, but up", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", null, maxIP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, maxIP, F, F)), numDocs).scoreDocs; assertEquals("not max, but down", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, maxIP, T, F)), numDocs).scoreDocs; assertEquals("med and up, not max", maxId - medId, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, medIP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, medIP, F, T)), numDocs).scoreDocs; assertEquals("not min, up to med", medId - minId, result.length); // very small sets - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, minIP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, minIP, F, F)), numDocs).scoreDocs; assertEquals("min,min,F,F", 0, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", medIP, medIP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, medIP, F, F)), numDocs).scoreDocs; assertEquals("med,med,F,F", 0, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, F, F)), numDocs).scoreDocs; assertEquals("max,max,F,F", 0, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", minIP, minIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", minIP, minIP, T, T)), numDocs).scoreDocs; assertEquals("min,min,T,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", null, minIP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", null, minIP, F, T)), numDocs).scoreDocs; assertEquals("nul,min,F,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, maxIP, T, T)), numDocs).scoreDocs; assertEquals("max,max,T,T", 1, result.length); - result = search.search(q, 
TermRangeFilter.newStringRange("id", maxIP, null, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", maxIP, null, T, F)), numDocs).scoreDocs; assertEquals("max,nul,T,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("id", medIP, medIP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("id", medIP, medIP, T, T)), numDocs).scoreDocs; assertEquals("med,med,T,T", 1, result.length); } @@ -153,60 +153,60 @@ public class TestTermRangeFilter extends BaseTestRangeFilter { // test extremes, bounded on both ends - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, T)), numDocs).scoreDocs; assertEquals("find all", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, T, F)), numDocs).scoreDocs; assertEquals("all but biggest", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, T)), numDocs).scoreDocs; assertEquals("all but smallest", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, maxRP, F, F)), numDocs).scoreDocs; assertEquals("all but extremes", numDocs - 2, result.length); // unbounded - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, null, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, null, T, F)), numDocs).scoreDocs; assertEquals("smallest and up", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, T)), numDocs).scoreDocs; assertEquals("biggest and down", numDocs, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, null, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, null, F, F)), numDocs).scoreDocs; assertEquals("not smallest, but up", numDocs - 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, maxRP, F, F)), numDocs).scoreDocs; assertEquals("not biggest, but down", numDocs - 1, result.length); // very small sets - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, minRP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, minRP, F, F)), numDocs).scoreDocs; assertEquals("min,min,F,F", 0, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, F, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, F, F)), numDocs).scoreDocs; assertEquals("max,max,F,F", 0, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", minRP, minRP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", minRP, minRP, T, 
T)), numDocs).scoreDocs; assertEquals("min,min,T,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", null, minRP, F, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", null, minRP, F, T)), numDocs).scoreDocs; assertEquals("nul,min,F,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, T, T), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, maxRP, T, T)), numDocs).scoreDocs; assertEquals("max,max,T,T", 1, result.length); - result = search.search(q, TermRangeFilter.newStringRange("rand", maxRP, null, T, F), + result = search.search(new FilteredQuery(q, TermRangeFilter.newStringRange("rand", maxRP, null, T, F)), numDocs).scoreDocs; assertEquals("max,nul,T,T", 1, result.length); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java index b954e3389cd..79c60b2526e 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermRangeQuery.java @@ -56,21 +56,21 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[] {"A", "B", "C", "D"}); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("A,B,C,D, only B in range", 1, hits.length); reader.close(); initializeIndex(new String[] {"A", "B", "D"}); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("A,B,D, only B in range", 1, hits.length); reader.close(); addDoc("C"); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("C added, still only B in range", 1, hits.length); reader.close(); } @@ -81,21 +81,21 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[]{"A", "B", "C", "D"}); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("A,B,C,D - A,B,C in range", 3, hits.length); reader.close(); initializeIndex(new String[]{"A", "B", "D"}); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("A,B,D - A and B in range", 2, hits.length); reader.close(); addDoc("C"); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("C added - A, B, C in range", 3, hits.length); reader.close(); } @@ -107,17 +107,17 @@ public class TestTermRangeQuery extends LuceneTestCase { TermRangeQuery query = new TermRangeQuery("content", null, null, true, true); Terms terms = MultiFields.getTerms(searcher.getIndexReader(), "content"); assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum); - assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length); + assertEquals(4, 
searcher.search(query, 1000).scoreDocs.length); query = new TermRangeQuery("content", null, null, false, false); assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum); - assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length); + assertEquals(4, searcher.search(query, 1000).scoreDocs.length); query = TermRangeQuery.newStringRange("content", "", null, true, false); assertFalse(query.getTermsEnum(terms) instanceof TermRangeTermsEnum); - assertEquals(4, searcher.search(query, null, 1000).scoreDocs.length); + assertEquals(4, searcher.search(query, 1000).scoreDocs.length); // and now anothe one query = TermRangeQuery.newStringRange("content", "B", null, true, false); assertTrue(query.getTermsEnum(terms) instanceof TermRangeTermsEnum); - assertEquals(3, searcher.search(query, null, 1000).scoreDocs.length); + assertEquals(3, searcher.search(query, 1000).scoreDocs.length); reader.close(); } @@ -276,7 +276,7 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[] {"A", "B", "", "C", "D"}, analyzer); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - int numHits = searcher.search(query, null, 1000).totalHits; + int numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("A,B,,C,D => A, B & are in range", 3, numHits); // until Lucene-38 is fixed, use this assert: @@ -286,7 +286,7 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[] {"A", "B", "", "D"}, analyzer); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - numHits = searcher.search(query, null, 1000).totalHits; + numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("A,B,,D => A, B & are in range", 3, numHits); // until Lucene-38 is fixed, use this assert: @@ -295,7 +295,7 @@ public class TestTermRangeQuery extends LuceneTestCase { addDoc("C"); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - numHits = searcher.search(query, null, 1000).totalHits; + numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("C added, still A, B & are in range", 3, numHits); // until Lucene-38 is fixed, use this assert @@ -311,7 +311,7 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[]{"A", "B", "","C", "D"}, analyzer); IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = newSearcher(reader); - int numHits = searcher.search(query, null, 1000).totalHits; + int numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("A,B,,C,D => A,B,,C in range", 4, numHits); // until Lucene-38 is fixed, use this assert @@ -320,7 +320,7 @@ public class TestTermRangeQuery extends LuceneTestCase { initializeIndex(new String[]{"A", "B", "", "D"}, analyzer); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - numHits = searcher.search(query, null, 1000).totalHits; + numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("A,B,,D - A, B and in range", 3, numHits); // until Lucene-38 is fixed, use this assert @@ -329,7 +329,7 @@ public class TestTermRangeQuery extends LuceneTestCase { addDoc("C"); reader = DirectoryReader.open(dir); searcher = newSearcher(reader); - numHits = searcher.search(query, 
null, 1000).totalHits; + numHits = searcher.search(query, 1000).totalHits; // When Lucene-38 is fixed, use the assert on the next line: assertEquals("C added => A,B,,C in range", 4, numHits); // until Lucene-38 is fixed, use this assert diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java index 4a9d84c736c..c7b6a0863b6 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java @@ -105,7 +105,7 @@ public class TestTimeLimitingCollector extends LuceneTestCase { query = booleanQuery; // warm the searcher - searcher.search(query, null, 1000); + searcher.search(query, 1000); } @Override diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTotalHitCountCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTotalHitCountCollector.java index 409ef71d206..c2d3443186a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTotalHitCountCollector.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTotalHitCountCollector.java @@ -41,7 +41,7 @@ public class TestTotalHitCountCollector extends LuceneTestCase { IndexSearcher searcher = newSearcher(reader); TotalHitCountCollector c = new TotalHitCountCollector(); - searcher.search(new MatchAllDocsQuery(), null, c); + searcher.search(new MatchAllDocsQuery(), c); assertEquals(5, c.getTotalHits()); reader.close(); indexStore.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java b/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java index dbbaad634b0..3e8f8e77117 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestWildcard.java @@ -254,7 +254,7 @@ public class TestWildcard private void assertMatches(IndexSearcher searcher, Query q, int expectedMatches) throws IOException { - ScoreDoc[] result = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] result = searcher.search(q, 1000).scoreDocs; assertEquals(expectedMatches, result.length); } @@ -354,14 +354,14 @@ public class TestWildcard // test queries that must find all for (Query q : matchAll) { if (VERBOSE) System.out.println("matchAll: q=" + q + " " + q.getClass().getName()); - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals(docs.length, hits.length); } // test queries that must find none for (Query q : matchNone) { if (VERBOSE) System.out.println("matchNone: q=" + q + " " + q.getClass().getName()); - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals(0, hits.length); } @@ -370,7 +370,7 @@ public class TestWildcard for (int j = 0; j < matchOneDocPrefix[i].length; j++) { Query q = matchOneDocPrefix[i][j]; if (VERBOSE) System.out.println("match 1 prefix: doc="+docs[i]+" q="+q+" "+q.getClass().getName()); - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals(1,hits.length); assertEquals(i,hits[0].doc); } @@ -381,7 +381,7 @@ public class TestWildcard for (int j = 0; j < matchOneDocWild[i].length; j++) { Query q = matchOneDocWild[i][j]; if (VERBOSE) System.out.println("match 1 wild: doc="+docs[i]+" q="+q+" "+q.getClass().getName()); - ScoreDoc[] hits = searcher.search(q, null, 1000).scoreDocs; + 
ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs; assertEquals(1,hits.length); assertEquals(i,hits[0].doc); } diff --git a/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java b/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java index 0ddb46adda4..fe977a67ca1 100644 --- a/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadNearQuery.java @@ -141,7 +141,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4 // and all the similarity factors are set to 1 - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("should be 10 hits", hits.totalHits == 10); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -155,7 +155,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { } // all should have score = 3 because adjacent terms have payloads of 2,4 // and all the similarity factors are set to 1 - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertEquals("should be 100 hits", 100, hits.totalHits); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -179,7 +179,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { clauses[1] = q2; query = new PayloadNearQuery(clauses, 10, false); //System.out.println(query.toString()); - assertEquals(12, searcher.search(query, null, 100).totalHits); + assertEquals(12, searcher.search(query, 100).totalHits); /* System.out.println(hits.totalHits); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -197,7 +197,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { QueryUtils.check(query); // all 10 hits should have score = 3 because adjacent terms have payloads of 2,4 // and all the similarity factors are set to 1 - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("should be 10 hits", hits.totalHits == 10); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -216,7 +216,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { query = newPhraseQuery("field", "twenty two", true, new MaxPayloadFunction()); QueryUtils.check(query); // all 10 hits should have score = 4 (max payload value) - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("should be 10 hits", hits.totalHits == 10); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -235,7 +235,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { query = newPhraseQuery("field", "twenty two", true, new MinPayloadFunction()); QueryUtils.check(query); // all 10 hits should have score = 2 (min payload value) - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("should be 10 hits", hits.totalHits == 10); for (int j = 0; j < hits.scoreDocs.length; j++) { @@ -269,7 +269,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { PayloadNearQuery query; TopDocs hits; query = newPhraseQuery("field", "nine hundred ninety nine", true, new AveragePayloadFunction()); - hits = searcher.search(query, null, 100); + hits = 
searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); ScoreDoc doc = hits.scoreDocs[0]; // System.out.println("Doc: " + doc.toString()); @@ -291,7 +291,7 @@ public class TestPayloadNearQuery extends LuceneTestCase { SpanQuery q4 = newPhraseQuery("field", "hundred nine", false, new AveragePayloadFunction()); SpanQuery[]clauses = new SpanQuery[] {new PayloadNearQuery(new SpanQuery[] {q1,q2}, 0, true), new PayloadNearQuery(new SpanQuery[] {q3,q4}, 0, false)}; query = new PayloadNearQuery(clauses, 0, false); - hits = searcher.search(query, null, 100); + hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); // should be only 1 hit - doc 999 assertTrue("should only be one hit", hits.scoreDocs.length == 1); diff --git a/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java b/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java index 7833f572c19..80fe83fc6cf 100644 --- a/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/payloads/TestPayloadTermQuery.java @@ -147,7 +147,7 @@ public class TestPayloadTermQuery extends LuceneTestCase { public void test() throws IOException { PayloadTermQuery query = new PayloadTermQuery(new Term("field", "seventy"), new MaxPayloadFunction()); - TopDocs hits = searcher.search(query, null, 100); + TopDocs hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100); @@ -188,7 +188,7 @@ public class TestPayloadTermQuery extends LuceneTestCase { public void testMultipleMatchesPerDoc() throws Exception { PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.MULTI_FIELD, "seventy"), new MaxPayloadFunction()); - TopDocs hits = searcher.search(query, null, 100); + TopDocs hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100); @@ -230,7 +230,7 @@ public class TestPayloadTermQuery extends LuceneTestCase { IndexReader reader = DirectoryReader.open(directory); IndexSearcher theSearcher = newSearcher(reader); theSearcher.setSimilarity(new FullSimilarity()); - TopDocs hits = searcher.search(query, null, 100); + TopDocs hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("hits Size: " + hits.totalHits + " is not: " + 100, hits.totalHits == 100); @@ -267,7 +267,7 @@ public class TestPayloadTermQuery extends LuceneTestCase { public void testNoMatch() throws Exception { PayloadTermQuery query = new PayloadTermQuery(new Term(PayloadHelper.FIELD, "junk"), new MaxPayloadFunction()); - TopDocs hits = searcher.search(query, null, 100); + TopDocs hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("hits Size: " + hits.totalHits + " is not: " + 0, hits.totalHits == 0); @@ -283,7 +283,7 @@ public class TestPayloadTermQuery extends LuceneTestCase { BooleanQuery query = new BooleanQuery(); query.add(c1); query.add(c2); - TopDocs hits = searcher.search(query, null, 100); + TopDocs hits = searcher.search(query, 100); assertTrue("hits is null and it shouldn't be", hits != null); assertTrue("hits Size: " + hits.totalHits + " is not: " + 1, hits.totalHits == 1); int[] results = new int[1]; diff --git 
a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java index 1ae61687805..ac456fd35a3 100644 --- a/lucene/core/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java +++ b/lucene/core/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java @@ -136,7 +136,7 @@ public class TestSpansAdvanced extends LuceneTestCase { // Hits hits = searcher.search(query); // hits normalizes and throws things off if one score is greater than 1.0 - TopDocs topdocs = s.search(query, null, 10000); + TopDocs topdocs = s.search(query, 10000); /***** * // display the hits System.out.println(hits.length() + diff --git a/lucene/core/src/test/org/apache/lucene/store/TestBufferedIndexInput.java b/lucene/core/src/test/org/apache/lucene/store/TestBufferedIndexInput.java index 003d4c47354..188f1ee0bfc 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestBufferedIndexInput.java +++ b/lucene/core/src/test/org/apache/lucene/store/TestBufferedIndexInput.java @@ -237,7 +237,7 @@ public class TestBufferedIndexInput extends LuceneTestCase { writer.deleteDocuments(new Term("id", "0")); reader = DirectoryReader.open(writer, true); IndexSearcher searcher = newSearcher(reader); - ScoreDoc[] hits = searcher.search(new TermQuery(bbb), null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs; dir.tweakBufferSizes(); assertEquals(36, hits.length); @@ -248,14 +248,14 @@ public class TestBufferedIndexInput extends LuceneTestCase { reader = DirectoryReader.open(writer, true); searcher = newSearcher(reader); - hits = searcher.search(new TermQuery(bbb), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(bbb), 1000).scoreDocs; dir.tweakBufferSizes(); assertEquals(35, hits.length); dir.tweakBufferSizes(); - hits = searcher.search(new TermQuery(new Term("id", "33")), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(new Term("id", "33")), 1000).scoreDocs; dir.tweakBufferSizes(); assertEquals(1, hits.length); - hits = searcher.search(new TermQuery(aaa), null, 1000).scoreDocs; + hits = searcher.search(new TermQuery(aaa), 1000).scoreDocs; dir.tweakBufferSizes(); assertEquals(35, hits.length); writer.close(); diff --git a/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java b/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java index 8c5c6952136..8b582d3ac7d 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java +++ b/lucene/core/src/test/org/apache/lucene/store/TestLockFactory.java @@ -280,7 +280,7 @@ public class TestLockFactory extends LuceneTestCase { break; } try { - searcher.search(query, null, 1000); + searcher.search(query, 1000); } catch (IOException e) { hitException = true; System.out.println("Stress Test Index Searcher: search hit unexpected exception: " + e.toString()); diff --git a/lucene/demo/src/java/org/apache/lucene/demo/SearchFiles.java b/lucene/demo/src/java/org/apache/lucene/demo/SearchFiles.java index daade5c22eb..ba148d2abf4 100644 --- a/lucene/demo/src/java/org/apache/lucene/demo/SearchFiles.java +++ b/lucene/demo/src/java/org/apache/lucene/demo/SearchFiles.java @@ -120,7 +120,7 @@ public class SearchFiles { if (repeat > 0) { // repeat & time as benchmark Date start = new Date(); for (int i = 0; i < repeat; i++) { - searcher.search(query, null, 100); + searcher.search(query, 100); } Date end = new Date(); System.out.println("Time: "+(end.getTime()-start.getTime())+"ms"); diff --git 
a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java index 78490339448..7a6a36ec653 100644 --- a/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java +++ b/lucene/demo/src/java/org/apache/lucene/demo/facet/SimpleFacetsExample.java @@ -131,7 +131,7 @@ public class SimpleFacetsExample { // MatchAllDocsQuery is for "browsing" (counts facets // for all non-deleted docs in the index); normally // you'd use a "normal" query: - searcher.search(new MatchAllDocsQuery(), null /*Filter */, fc); + searcher.search(new MatchAllDocsQuery(), fc); // Retrieve results List results = new ArrayList<>(); diff --git a/lucene/expressions/src/test/org/apache/lucene/expressions/TestDemoExpressions.java b/lucene/expressions/src/test/org/apache/lucene/expressions/TestDemoExpressions.java index 7502f3a0225..039f1a7bacb 100644 --- a/lucene/expressions/src/test/org/apache/lucene/expressions/TestDemoExpressions.java +++ b/lucene/expressions/src/test/org/apache/lucene/expressions/TestDemoExpressions.java @@ -105,7 +105,7 @@ public class TestDemoExpressions extends LuceneTestCase { // create a sort field and sort by it (reverse order) Sort sort = new Sort(expr.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - searcher.search(query, null, 3, sort); + searcher.search(query, 3, sort); } /** tests the returned sort values are correct */ @@ -117,7 +117,7 @@ public class TestDemoExpressions extends LuceneTestCase { Sort sort = new Sort(expr.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - TopFieldDocs td = searcher.search(query, null, 3, sort, true, true); + TopFieldDocs td = searcher.search(query, 3, sort, true, true); for (int i = 0; i < 3; i++) { FieldDoc d = (FieldDoc) td.scoreDocs[i]; float expected = (float) Math.sqrt(d.score); @@ -135,7 +135,7 @@ public class TestDemoExpressions extends LuceneTestCase { Sort sort = new Sort(expr.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - TopFieldDocs td = searcher.search(query, null, 3, sort, true, true); + TopFieldDocs td = searcher.search(query, 3, sort, true, true); for (int i = 0; i < 3; i++) { FieldDoc d = (FieldDoc) td.scoreDocs[i]; float expected = 2*d.score; @@ -154,7 +154,7 @@ public class TestDemoExpressions extends LuceneTestCase { Sort sort = new Sort(expr.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - TopFieldDocs td = searcher.search(query, null, 3, sort, true, true); + TopFieldDocs td = searcher.search(query, 3, sort, true, true); for (int i = 0; i < 3; i++) { FieldDoc d = (FieldDoc) td.scoreDocs[i]; float expected = 2*d.score; @@ -174,7 +174,7 @@ public class TestDemoExpressions extends LuceneTestCase { Sort sort = new Sort(expr2.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - TopFieldDocs td = searcher.search(query, null, 3, sort, true, true); + TopFieldDocs td = searcher.search(query, 3, sort, true, true); for (int i = 0; i < 3; i++) { FieldDoc d = (FieldDoc) td.scoreDocs[i]; float expected = 2*d.score; @@ -206,7 +206,7 @@ public class TestDemoExpressions extends LuceneTestCase { Expression expr = JavascriptCompiler.compile(sb.toString()); Sort sort = new Sort(expr.getSortField(bindings, true)); Query query = new TermQuery(new Term("body", "contents")); - TopFieldDocs td = searcher.search(query, null, 3, sort, true, true); + 
TopFieldDocs td = searcher.search(query, 3, sort, true, true); for (int i = 0; i < 3; i++) { FieldDoc d = (FieldDoc) td.scoreDocs[i]; float expected = n*d.score; @@ -221,7 +221,7 @@ public class TestDemoExpressions extends LuceneTestCase { bindings.add(new SortField("latitude", SortField.Type.DOUBLE)); bindings.add(new SortField("longitude", SortField.Type.DOUBLE)); Sort sort = new Sort(distance.getSortField(bindings, false)); - TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort); + TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort); FieldDoc d = (FieldDoc) td.scoreDocs[0]; assertEquals(0.4619D, (Double)d.fields[0], 1E-4); @@ -238,7 +238,7 @@ public class TestDemoExpressions extends LuceneTestCase { SimpleBindings bindings = new SimpleBindings(); bindings.add("doc['popularity'].value", new IntFieldSource("popularity")); Sort sort = new Sort(popularity.getSortField(bindings, true)); - TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort); + TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort); FieldDoc d = (FieldDoc)td.scoreDocs[0]; assertEquals(20D, (Double)d.fields[0], 1E-4); @@ -288,7 +288,7 @@ public class TestDemoExpressions extends LuceneTestCase { } }; Sort sort = new Sort(popularity.getSortField(bindings, false)); - TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), null, 3, sort); + TopFieldDocs td = searcher.search(new MatchAllDocsQuery(), 3, sort); FieldDoc d = (FieldDoc)td.scoreDocs[0]; assertEquals(2092D, (Double)d.fields[0], 1E-4); diff --git a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionSorts.java b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionSorts.java index fc95924756c..ff87fc0c106 100644 --- a/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionSorts.java +++ b/lucene/expressions/src/test/org/apache/lucene/expressions/TestExpressionSorts.java @@ -86,23 +86,20 @@ public class TestExpressionSorts extends LuceneTestCase { public void testQueries() throws Exception { int n = atLeast(4); for (int i = 0; i < n; i++) { - Filter odd = new QueryWrapperFilter(new TermQuery(new Term("oddeven", "odd"))); - assertQuery(new MatchAllDocsQuery(), null); - assertQuery(new TermQuery(new Term("english", "one")), null); - assertQuery(new MatchAllDocsQuery(), odd); - assertQuery(new TermQuery(new Term("english", "four")), odd); + assertQuery(new MatchAllDocsQuery()); + assertQuery(new TermQuery(new Term("english", "one"))); BooleanQuery bq = new BooleanQuery(); bq.add(new TermQuery(new Term("english", "one")), BooleanClause.Occur.SHOULD); bq.add(new TermQuery(new Term("oddeven", "even")), BooleanClause.Occur.SHOULD); - assertQuery(bq, null); + assertQuery(bq); // force in order bq.add(new TermQuery(new Term("english", "two")), BooleanClause.Occur.SHOULD); bq.setMinimumNumberShouldMatch(2); - assertQuery(bq, null); + assertQuery(bq); } } - void assertQuery(Query query, Filter filter) throws Exception { + void assertQuery(Query query) throws Exception { for (int i = 0; i < 10; i++) { boolean reversed = random().nextBoolean(); SortField fields[] = new SortField[] { @@ -114,13 +111,13 @@ public class TestExpressionSorts extends LuceneTestCase { }; Collections.shuffle(Arrays.asList(fields), random()); int numSorts = TestUtil.nextInt(random(), 1, fields.length); - assertQuery(query, filter, new Sort(Arrays.copyOfRange(fields, 0, numSorts))); + assertQuery(query, new Sort(Arrays.copyOfRange(fields, 0, numSorts))); } } - void assertQuery(Query 
query, Filter filter, Sort sort) throws Exception { + void assertQuery(Query query, Sort sort) throws Exception { int size = TestUtil.nextInt(random(), 1, searcher.getIndexReader().maxDoc() / 5); - TopDocs expected = searcher.search(query, filter, size, sort, random().nextBoolean(), random().nextBoolean()); + TopDocs expected = searcher.search(query, size, sort, random().nextBoolean(), random().nextBoolean()); // make our actual sort, mutating original by replacing some of the // sortfields with equivalent expressions @@ -141,12 +138,12 @@ public class TestExpressionSorts extends LuceneTestCase { } Sort mutatedSort = new Sort(mutated); - TopDocs actual = searcher.search(query, filter, size, mutatedSort, random().nextBoolean(), random().nextBoolean()); + TopDocs actual = searcher.search(query, size, mutatedSort, random().nextBoolean(), random().nextBoolean()); CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs); if (size < actual.totalHits) { - expected = searcher.searchAfter(expected.scoreDocs[size-1], query, filter, size, sort); - actual = searcher.searchAfter(actual.scoreDocs[size-1], query, filter, size, mutatedSort); + expected = searcher.searchAfter(expected.scoreDocs[size-1], query, size, sort); + actual = searcher.searchAfter(actual.scoreDocs[size-1], query, size, mutatedSort); CheckHits.checkEqual(query, expected.scoreDocs, actual.scoreDocs); } } diff --git a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java index 720933c4f76..7916e5044f2 100644 --- a/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java +++ b/lucene/facet/src/test/org/apache/lucene/facet/TestDrillSideways.java @@ -46,6 +46,7 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -752,7 +753,11 @@ public class TestDrillSideways extends FacetTestCase { verifyEquals(dimValues, s, expected, actual, scores, doUseDV); // Make sure drill down doesn't change score: - TopDocs ddqHits = s.search(ddq, filter, numDocs); + Query q = ddq; + if (filter != null) { + q = new FilteredQuery(q, filter); + } + TopDocs ddqHits = s.search(q, numDocs); assertEquals(expected.hits.size(), ddqHits.totalHits); for(int i=0;i TopGroups search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException { - return search(searcher, null, query, groupOffset, groupLimit); - } - - /** - * Executes a grouped search. Both the first pass and second pass are executed on the specified searcher. - * - * @param searcher The {@link org.apache.lucene.search.IndexSearcher} instance to execute the grouped search on. 
- * @param filter The filter to execute with the grouping - * @param query The query to execute with the grouping - * @param groupOffset The group offset - * @param groupLimit The number of groups to return from the specified group offset - * @return the grouped result as a {@link TopGroups} instance - * @throws IOException If any I/O related errors occur - */ @SuppressWarnings("unchecked") - public TopGroups search(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException { + public TopGroups search(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException { if (groupField != null || groupFunction != null) { - return groupByFieldOrFunction(searcher, filter, query, groupOffset, groupLimit); + return groupByFieldOrFunction(searcher, query, groupOffset, groupLimit); } else if (groupEndDocs != null) { - return (TopGroups) groupByDocBlock(searcher, filter, query, groupOffset, groupLimit); + return (TopGroups) groupByDocBlock(searcher, query, groupOffset, groupLimit); } else { throw new IllegalStateException("Either groupField, groupFunction or groupEndDocs must be set."); // This can't happen... } } @SuppressWarnings({"unchecked", "rawtypes"}) - protected TopGroups groupByFieldOrFunction(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException { + protected TopGroups groupByFieldOrFunction(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException { int topN = groupOffset + groupLimit; final AbstractFirstPassGroupingCollector firstPassCollector; final AbstractAllGroupsCollector allGroupsCollector; @@ -204,9 +189,9 @@ public class GroupingSearch { } else { cachedCollector = CachingCollector.create(firstRound, cacheScores, maxDocsToCache); } - searcher.search(query, filter, cachedCollector); + searcher.search(query, cachedCollector); } else { - searcher.search(query, filter, firstRound); + searcher.search(query, firstRound); } if (allGroups) { @@ -236,7 +221,7 @@ public class GroupingSearch { if (cachedCollector != null && cachedCollector.isCached()) { cachedCollector.replay(secondPassCollector); } else { - searcher.search(query, filter, secondPassCollector); + searcher.search(query, secondPassCollector); } if (allGroups) { @@ -246,10 +231,10 @@ public class GroupingSearch { } } - protected TopGroups groupByDocBlock(IndexSearcher searcher, Filter filter, Query query, int groupOffset, int groupLimit) throws IOException { + protected TopGroups groupByDocBlock(IndexSearcher searcher, Query query, int groupOffset, int groupLimit) throws IOException { int topN = groupOffset + groupLimit; BlockGroupingCollector c = new BlockGroupingCollector(groupSort, topN, includeScores, groupEndDocs); - searcher.search(query, filter, c); + searcher.search(query, c); int topNInsideGroup = groupDocsOffset + groupDocsLimit; return c.getTopGroups(sortWithinGroup, groupOffset, groupDocsOffset, topNInsideGroup, fillSortFields); } diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java index cbbfd609954..85f9741198d 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/GroupingSearchTest.java @@ -123,7 +123,7 @@ public class GroupingSearchTest extends LuceneTestCase { Sort groupSort = Sort.RELEVANCE; GroupingSearch groupingSearch = 
createRandomGroupingSearch(groupField, groupSort, 5, canUseIDV); - TopGroups groups = groupingSearch.search(indexSearcher, null, new TermQuery(new Term("content", "random")), 0, 10); + TopGroups groups = groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10); assertEquals(7, groups.totalHitCount); assertEquals(7, groups.totalGroupedHitCount); @@ -161,7 +161,7 @@ public class GroupingSearchTest extends LuceneTestCase { Filter lastDocInBlock = new CachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("groupend", "x")))); groupingSearch = new GroupingSearch(lastDocInBlock); - groups = groupingSearch.search(indexSearcher, null, new TermQuery(new Term("content", "random")), 0, 10); + groups = groupingSearch.search(indexSearcher, new TermQuery(new Term("content", "random")), 0, 10); assertEquals(7, groups.totalHitCount); assertEquals(7, groups.totalGroupedHitCount); @@ -237,7 +237,7 @@ public class GroupingSearchTest extends LuceneTestCase { GroupingSearch gs = new GroupingSearch("group"); gs.setAllGroups(true); - TopGroups groups = gs.search(indexSearcher, null, new TermQuery(new Term("group", "foo")), 0, 10); + TopGroups groups = gs.search(indexSearcher, new TermQuery(new Term("group", "foo")), 0, 10); assertEquals(1, groups.totalHitCount); //assertEquals(1, groups.totalGroupCount.intValue()); assertEquals(1, groups.totalGroupedHitCount); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java index 0713c0dfa13..295992bc930 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java @@ -1014,7 +1014,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte // it rewrites to ConstantScoreQuery which cannot be highlighted // query = unReWrittenQuery.rewrite(reader); if (VERBOSE) System.out.println("Searching for: " + query.toString(FIELD_NAME)); - hits = searcher.search(query, null, 1000); + hits = searcher.search(query, 1000); for (int i = 0; i < hits.totalHits; i++) { final StoredDocument doc = searcher.doc(hits.scoreDocs[i].doc); @@ -1036,7 +1036,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte // try null field - hits = searcher.search(query, null, 1000); + hits = searcher.search(query, 1000); numHighlights = 0; @@ -1061,7 +1061,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte // try default field - hits = searcher.search(query, null, 1000); + hits = searcher.search(query, 1000); numHighlights = 0; @@ -1541,7 +1541,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte if (VERBOSE) System.out.println("Searching with primitive query"); // forget to set this and... 
// query=query.rewrite(reader); - TopDocs hits = searcher.search(query, null, 1000); + TopDocs hits = searcher.search(query, 1000); // create an instance of the highlighter with the tags used to surround // highlighted text @@ -1913,7 +1913,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte //Scorer scorer = new QueryTermScorer( query, "t_text1" ); Highlighter h = new Highlighter( scorer ); - TopDocs hits = searcher.search(query, null, 10); + TopDocs hits = searcher.search(query, 10); for( int i = 0; i < hits.totalHits; i++ ){ StoredDocument doc = searcher.doc( hits.scoreDocs[i].doc ); String result = h.getBestFragment( a, "t_text1", doc.get( "t_text1" )); @@ -1944,7 +1944,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte scorer.setUsePayloads(true); Highlighter h = new Highlighter(scorer); - TopDocs hits = searcher.search(query, null, 10); + TopDocs hits = searcher.search(query, 10); assertEquals(1, hits.scoreDocs.length); TokenStream stream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), 0, FIELD_NAME, analyzer); if (random().nextBoolean()) { @@ -1996,7 +1996,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte // you must use a rewritten query! query = unReWrittenQuery.rewrite(reader); if (VERBOSE) System.out.println("Searching for: " + query.toString(FIELD_NAME)); - hits = searcher.search(query, null, 1000); + hits = searcher.search(query, 1000); } public void assertExpectedHighlightCount(final int maxNumFragmentsRequired, diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java index 49f207226b3..0968dec1aaf 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestMultiTermHighlighting.java @@ -92,7 +92,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = new WildcardQuery(new Term("body", "te*")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -103,7 +103,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(new WildcardQuery(new Term("bogus", "te*")), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -144,7 +144,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = new PrefixQuery(new Term("body", "te")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -155,7 +155,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new 
MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(new PrefixQuery(new Term("bogus", "te")), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -196,7 +196,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = new RegexpQuery(new Term("body", "te.*")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -207,7 +207,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(new RegexpQuery(new Term("bogus", "te.*")), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -248,7 +248,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = new FuzzyQuery(new Term("body", "tets"), 1); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -257,7 +257,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { // with prefix query = new FuzzyQuery(new Term("body", "tets"), 1, 2); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -268,7 +268,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(new FuzzyQuery(new Term("bogus", "tets"), 1), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -309,7 +309,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = TermRangeQuery.newStringRange("body", "ta", "tf", true, true); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -318,7 +318,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { // null start query = TermRangeQuery.newStringRange("body", null, "tf", true, true); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ 
-327,7 +327,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { // null end query = TermRangeQuery.newStringRange("body", "ta", null, true, true); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -336,7 +336,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { // exact start inclusive query = TermRangeQuery.newStringRange("body", "test", "tf", true, true); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -345,7 +345,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { // exact end inclusive query = TermRangeQuery.newStringRange("body", "ta", "test", true, true); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -356,7 +356,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { BooleanQuery bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(TermRangeQuery.newStringRange("body", "test", "tf", false, true), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -367,7 +367,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(TermRangeQuery.newStringRange("body", "ta", "test", true, false), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -378,7 +378,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { bq = new BooleanQuery(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); bq.add(TermRangeQuery.newStringRange("bogus", "ta", "tf", true, true), BooleanClause.Occur.SHOULD); - topDocs = searcher.search(bq, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(bq, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", bq, searcher, topDocs); assertEquals(2, snippets.length); @@ -420,7 +420,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { }; BooleanQuery query = new BooleanQuery(); query.add(new WildcardQuery(new Term("body", "te*")), BooleanClause.Occur.SHOULD); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -431,7 +431,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { query = new BooleanQuery(); query.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); 
query.add(new WildcardQuery(new Term("bogus", "te*")), BooleanClause.Occur.MUST_NOT); - topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); snippets = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -474,7 +474,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { FilteredQuery query = new FilteredQuery( new WildcardQuery(new Term("body", "te*")), new QueryWrapperFilter(new TermQuery(new Term("body", "test")))); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -515,7 +515,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; ConstantScoreQuery query = new ConstantScoreQuery(new WildcardQuery(new Term("body", "te*"))); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -557,7 +557,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { }; DisjunctionMaxQuery query = new DisjunctionMaxQuery(0); query.add(new WildcardQuery(new Term("body", "te*"))); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -598,7 +598,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { } }; Query query = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -640,7 +640,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { }; SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))); Query query = new SpanOrQuery(new SpanQuery[] { childQuery }); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -682,7 +682,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { }; SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))); Query query = new SpanNearQuery(new SpanQuery[] { childQuery }, 0, true); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -725,7 +725,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { SpanQuery include = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))); SpanQuery exclude = new 
SpanTermQuery(new Term("body", "bogus")); Query query = new SpanNotQuery(include, exclude); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -767,7 +767,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { }; SpanQuery childQuery = new SpanMultiTermQueryWrapper<>(new WildcardQuery(new Term("body", "te*"))); Query query = new SpanFirstQuery(childQuery, 1000000); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -811,7 +811,7 @@ public class TestMultiTermHighlighting extends LuceneTestCase { query.add(new WildcardQuery(new Term("body", "te*")), BooleanClause.Occur.SHOULD); query.add(new WildcardQuery(new Term("body", "one")), BooleanClause.Occur.SHOULD); query.add(new WildcardQuery(new Term("body", "se*")), BooleanClause.Occur.SHOULD); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java index 177fcc092ee..8475679883c 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighter.java @@ -76,7 +76,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "highlighting")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -145,7 +145,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); PostingsHighlighter highlighter = new PostingsHighlighter(maxLength); @@ -179,7 +179,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); @@ -214,7 +214,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); 
PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -251,7 +251,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(40); Query query = new TermQuery(new Term("body", "field")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); @@ -291,7 +291,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { BooleanQuery query = new BooleanQuery(); query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("title", "best")), BooleanClause.Occur.SHOULD); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); Map snippets = highlighter.highlightFields(new String [] { "body", "title" }, query, searcher, topDocs); assertEquals(2, snippets.size()); @@ -329,7 +329,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { query.add(new TermQuery(new Term("body", "highlighting")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("body", "just")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("body", "first")), BooleanClause.Occur.SHOULD); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(2, snippets.length); @@ -363,7 +363,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2); assertEquals(2, snippets.length); @@ -401,7 +401,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); try { highlighter.highlight("body", query, searcher, topDocs, 2); @@ -539,7 +539,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, 
topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2); assertEquals(1, snippets.length); @@ -603,7 +603,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { } }; Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2); assertEquals(1, snippets.length); @@ -636,7 +636,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { IndexSearcher searcher = newSearcher(ir); PostingsHighlighter highlighter = new PostingsHighlighter(); Query query = new TermQuery(new Term("body", "highlighting")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(2, topDocs.totalHits); ScoreDoc[] hits = topDocs.scoreDocs; int[] docIDs = new int[2]; @@ -688,7 +688,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { }; Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 2); assertEquals(1, snippets.length); @@ -1015,7 +1015,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { } }; Query query = new TermQuery(new Term("body", "highlighting")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); @@ -1059,7 +1059,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { } }; Query query = new TermQuery(new Term("body", "field")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs); assertEquals(1, snippets.length); @@ -1106,7 +1106,7 @@ public class TestPostingsHighlighter extends LuceneTestCase { }; Query query = new TermQuery(new Term("body", "highlighting")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); int[] docIDs = new int[1]; docIDs[0] = topDocs.scoreDocs[0].doc; diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java index df3d7b6c238..29b06f53c40 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/postingshighlight/TestPostingsHighlighterRanking.java @@ -272,7 +272,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase { } }; Query query = new TermQuery(new Term("body", "test")); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] 
= highlighter.highlight("body", query, searcher, topDocs, 1); assertEquals(1, snippets.length); @@ -313,7 +313,7 @@ public class TestPostingsHighlighterRanking extends LuceneTestCase { BooleanQuery query = new BooleanQuery(); query.add(new TermQuery(new Term("body", "foo")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("body", "bar")), BooleanClause.Occur.SHOULD); - TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER); + TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER); assertEquals(1, topDocs.totalHits); String snippets[] = highlighter.highlight("body", query, searcher, topDocs, 1); assertEquals(1, snippets.length); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index 0a3dcfb9084..65263dfa678 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -51,6 +51,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiTermQuery; @@ -246,8 +247,8 @@ public class TestBlockJoin extends LuceneTestCase { assertEquals("Lisa", getParentDoc(r, parentsFilter, hits.scoreDocs[0].doc).get("name")); // Test with filter on child docs: - assertEquals(0, s.search(fullChildQuery, - new QueryWrapperFilter(new TermQuery(new Term("skill", "foosball"))), + assertEquals(0, s.search(new FilteredQuery(fullChildQuery, + new QueryWrapperFilter(new TermQuery(new Term("skill", "foosball")))), 1).totalHits); r.close(); @@ -354,20 +355,20 @@ public class TestBlockJoin extends LuceneTestCase { assertEquals("no filter - both passed", 2, s.search(childJoinQuery, 10).totalHits); - assertEquals("dummy filter passes everyone ", 2, s.search(childJoinQuery, parentsFilter, 10).totalHits); - assertEquals("dummy filter passes everyone ", 2, s.search(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("docType", "resume"))), 10).totalHits); + assertEquals("dummy filter passes everyone ", 2, s.search(new FilteredQuery(childJoinQuery, parentsFilter), 10).totalHits); + assertEquals("dummy filter passes everyone ", 2, s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("docType", "resume")))), 10).totalHits); // not found test - assertEquals("noone live there", 0, s.search(childJoinQuery, new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("country", "Oz")))), 1).totalHits); - assertEquals("noone live there", 0, s.search(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("country", "Oz"))), 1).totalHits); + assertEquals("noone live there", 0, s.search(new FilteredQuery(childJoinQuery, new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("country", "Oz"))))), 1).totalHits); + assertEquals("noone live there", 0, s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(new TermQuery(new Term("country", "Oz")))), 1).totalHits); // apply the UK filter by the searcher - TopDocs ukOnly = s.search(childJoinQuery, new QueryWrapperFilter(parentQuery), 1); + TopDocs ukOnly = s.search(new FilteredQuery(childJoinQuery, new QueryWrapperFilter(parentQuery)), 1); 
assertEquals("has filter - single passed", 1, ukOnly.totalHits); assertEquals( "Lisa", r.document(ukOnly.scoreDocs[0].doc).get("name")); // looking for US candidates - TopDocs usThen = s.search(childJoinQuery , new QueryWrapperFilter(new TermQuery(new Term("country", "United States"))), 1); + TopDocs usThen = s.search(new FilteredQuery(childJoinQuery , new QueryWrapperFilter(new TermQuery(new Term("country", "United States")))), 1); assertEquals("has filter - single passed", 1, usThen.totalHits); assertEquals("Frank", r.document(usThen.scoreDocs[0].doc).get("name")); @@ -377,14 +378,14 @@ public class TestBlockJoin extends LuceneTestCase { s.search(new ToChildBlockJoinQuery(us, parentsFilter), 10).totalHits ); - assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter), - skill("java"), 10).totalHits ); + assertEquals("java skills in US", 1, s.search(new FilteredQuery(new ToChildBlockJoinQuery(us, parentsFilter), + skill("java")), 10).totalHits ); BooleanQuery rubyPython = new BooleanQuery(); rubyPython.add(new TermQuery(new Term("skill", "ruby")), Occur.SHOULD); rubyPython.add(new TermQuery(new Term("skill", "python")), Occur.SHOULD); - assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter), - new QueryWrapperFilter(rubyPython), 10).totalHits ); + assertEquals("ruby skills in US", 1, s.search(new FilteredQuery(new ToChildBlockJoinQuery(us, parentsFilter), + new QueryWrapperFilter(rubyPython)), 10).totalHits ); r.close(); dir.close(); @@ -786,7 +787,7 @@ public class TestBlockJoin extends LuceneTestCase { sortFields.addAll(Arrays.asList(childSort.getSort())); final Sort parentAndChildSort = new Sort(sortFields.toArray(new SortField[sortFields.size()])); - final TopDocs results = s.search(parentQuery, null, r.numDocs(), + final TopDocs results = s.search(parentQuery, r.numDocs(), parentAndChildSort); if (VERBOSE) { @@ -922,30 +923,24 @@ public class TestBlockJoin extends LuceneTestCase { final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter); // To run against the block-join index: - final Query childJoinQuery2; + Query childJoinQuery2; // Same query as parentJoinQuery, but to run against // the fully denormalized index (so we can compare // results): - final Query childQuery2; - - // apply a filter to children - final Filter childFilter2, childJoinFilter2; + Query childQuery2; if (random().nextBoolean()) { childQuery2 = parentQuery2; childJoinQuery2 = parentJoinQuery2; - childFilter2 = null; - childJoinFilter2 = null; } else { final Term childTerm = randomChildTerm(childFields[0]); if (random().nextBoolean()) { // filtered case childJoinQuery2 = parentJoinQuery2; final Filter f = new QueryWrapperFilter(new TermQuery(childTerm)); - childJoinFilter2 = random().nextBoolean() - ? new BitDocIdSetCachingWrapperFilter(f): f; + childJoinQuery2 = new FilteredQuery(childJoinQuery2, random().nextBoolean() + ? new BitDocIdSetCachingWrapperFilter(f): f); } else { - childJoinFilter2 = null; // AND child field w/ parent query: final BooleanQuery bq = new BooleanQuery(); childJoinQuery2 = bq; @@ -963,10 +958,9 @@ public class TestBlockJoin extends LuceneTestCase { if (random().nextBoolean()) { // filtered case childQuery2 = parentQuery2; final Filter f = new QueryWrapperFilter(new TermQuery(childTerm)); - childFilter2 = random().nextBoolean() - ? new BitDocIdSetCachingWrapperFilter(f): f; + childQuery2 = new FilteredQuery(childQuery2, random().nextBoolean() + ? 
new BitDocIdSetCachingWrapperFilter(f): f); } else { - childFilter2 = null; final BooleanQuery bq2 = new BooleanQuery(); childQuery2 = bq2; if (random().nextBoolean()) { @@ -985,11 +979,9 @@ public class TestBlockJoin extends LuceneTestCase { // Search denormalized index: if (VERBOSE) { - System.out.println("TEST: run top down query=" + childQuery2 + - " filter=" + childFilter2 + - " sort=" + childSort2); + System.out.println("TEST: run top down query=" + childQuery2 + " sort=" + childSort2); } - final TopDocs results2 = s.search(childQuery2, childFilter2, r.numDocs(), + final TopDocs results2 = s.search(childQuery2, r.numDocs(), childSort2); if (VERBOSE) { System.out.println(" " + results2.totalHits + " totalHits:"); @@ -1001,10 +993,9 @@ public class TestBlockJoin extends LuceneTestCase { // Search join index: if (VERBOSE) { - System.out.println("TEST: run top down join query=" + childJoinQuery2 + - " filter=" + childJoinFilter2 + " sort=" + childSort2); + System.out.println("TEST: run top down join query=" + childJoinQuery2 + " sort=" + childSort2); } - TopDocs joinResults2 = joinS.search(childJoinQuery2, childJoinFilter2, joinR.numDocs(), childSort2); + TopDocs joinResults2 = joinS.search(childJoinQuery2, joinR.numDocs(), childSort2); if (VERBOSE) { System.out.println(" " + joinResults2.totalHits + " totalHits:"); for(ScoreDoc sd : joinResults2.scoreDocs) { diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java index 8d35c41443b..380fc1b4540 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryWrapperFilter; @@ -121,7 +122,7 @@ public class TestBlockJoinValidation extends LuceneTestCase { Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("common_field", "1"))); thrown.expect(IllegalStateException.class); thrown.expectMessage(ToChildBlockJoinQuery.ILLEGAL_ADVANCE_ON_PARENT); - indexSearcher.search(blockJoinQuery, childFilter, 1); + indexSearcher.search(new FilteredQuery(blockJoinQuery, childFilter), 1); } @Test diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java index 541e7728e1d..ec911b2c93b 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSort.java @@ -1006,7 +1006,7 @@ public class TestFieldCacheSort extends LuceneTestCase { Collections.singletonMap("f", Type.SORTED)); w.close(); IndexSearcher s = newSearcher(r); - TopDocs hits = s.search(new TermQuery(new Term("t", "1")), null, 10, new Sort(new SortField("f", SortField.Type.STRING))); + TopDocs hits = s.search(new TermQuery(new Term("t", "1")), 10, new Sort(new SortField("f", SortField.Type.STRING))); assertEquals(2, hits.totalHits); // null sorts first assertEquals(1, hits.scoreDocs[0].doc); @@ -1035,7 +1035,7 @@ public class TestFieldCacheSort extends LuceneTestCase { Collections.singletonMap("string", 
Type.SORTED)); IndexSearcher searcher = new IndexSearcher(reader); try { - searcher.search(new MatchAllDocsQuery(), null, 500, sort); + searcher.search(new MatchAllDocsQuery(), 500, sort); fail("didn't get expected exception"); } catch (IllegalStateException expected) {} reader.close(); @@ -1069,10 +1069,10 @@ public class TestFieldCacheSort extends LuceneTestCase { Query q = new TermQuery(new Term("body", "text")); IndexSearcher s = newSearcher(r); float maxScore = s.search(q , 10).getMaxScore(); - assertEquals(maxScore, s.search(q, null, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0); - assertEquals(maxScore, s.search(q, null, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0); - assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0); - assertEquals(maxScore, s.search(q, null, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0); + assertEquals(maxScore, s.search(q, 3, Sort.INDEXORDER, random().nextBoolean(), true).getMaxScore(), 0.0); + assertEquals(maxScore, s.search(q, 3, Sort.RELEVANCE, random().nextBoolean(), true).getMaxScore(), 0.0); + assertEquals(maxScore, s.search(q, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, false)}), random().nextBoolean(), true).getMaxScore(), 0.0); + assertEquals(maxScore, s.search(q, 3, new Sort(new SortField[] {new SortField("id", SortField.Type.INT, true)}), random().nextBoolean(), true).getMaxScore(), 0.0); TestUtil.checkReader(r); r.close(); d.close(); @@ -1084,27 +1084,27 @@ public class TestFieldCacheSort extends LuceneTestCase { Query query = new TermQuery(new Term("contents", "foo")); Sort sort = new Sort(); - TopDocs td = empty.search(query, null, 10, sort, true, true); + TopDocs td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); sort.setSort(SortField.FIELD_DOC); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); sort.setSort(new SortField("int", SortField.Type.INT), SortField.FIELD_DOC); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); sort.setSort(new SortField("string", SortField.Type.STRING, true), SortField.FIELD_DOC); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); sort.setSort(new SortField("string_val", SortField.Type.STRING_VAL, true), SortField.FIELD_DOC); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); sort.setSort(new SortField("float", SortField.Type.FLOAT), new SortField("string", SortField.Type.STRING)); - td = empty.search(query, null, 10, sort, true, true); + td = empty.search(query, 10, sort, true, true); assertEquals(0, td.totalHits); } @@ -1146,7 +1146,7 @@ public class TestFieldCacheSort extends LuceneTestCase { TopDocs expected = searcher.search(new TermQuery(new Term("value", "foo")), 10); assertEquals(1, expected.totalHits); - TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), null, 10, sort, true, true); + TopDocs actual = searcher.search(new TermQuery(new Term("value", "foo")), 10, sort, true, true); assertEquals(expected.totalHits, actual.totalHits); 
assertEquals(expected.scoreDocs[0].score, actual.scoreDocs[0].score, 0F); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java index 9444257053f..7d1c24b40de 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestFieldCacheSortRandom.java @@ -43,6 +43,7 @@ import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; @@ -173,13 +174,13 @@ public class TestFieldCacheSortRandom extends LuceneTestCase { // Set minNrShouldMatch to 1 so that BQ will not optimize rewrite to return // the clause instead of BQ. bq.setMinimumNumberShouldMatch(1); - hits = s.search(bq, f, hitCount, sort, random.nextBoolean(), random.nextBoolean()); + hits = s.search(new FilteredQuery(bq, f), hitCount, sort, random.nextBoolean(), random.nextBoolean()); } else if (queryType == 1) { hits = s.search(new ConstantScoreQuery(f), - null, hitCount, sort, random.nextBoolean(), random.nextBoolean()); + hitCount, sort, random.nextBoolean(), random.nextBoolean()); } else { - hits = s.search(new MatchAllDocsQuery(), - f, hitCount, sort, random.nextBoolean(), random.nextBoolean()); + hits = s.search(new FilteredQuery(new MatchAllDocsQuery(), + f), hitCount, sort, random.nextBoolean(), random.nextBoolean()); } if (VERBOSE) { diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java index 99cf20544ce..1cbaea14c8a 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms32.java @@ -127,7 +127,7 @@ public class TestNumericTerms32 extends LuceneTestCase { int a=lower; lower=upper; upper=a; } Query tq=NumericRangeQuery.newIntRange(field, precisionStep, lower, upper, true, true); - TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.Type.INT, true))); + TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.INT, true))); if (topDocs.totalHits==0) continue; ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); diff --git a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java index 4d4bf9a7287..db66c0e9a9d 100644 --- a/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java +++ b/lucene/misc/src/test/org/apache/lucene/uninverting/TestNumericTerms64.java @@ -132,7 +132,7 @@ public class TestNumericTerms64 extends LuceneTestCase { long a=lower; lower=upper; upper=a; } Query tq=NumericRangeQuery.newLongRange(field, precisionStep, lower, upper, true, true); - TopDocs topDocs = searcher.search(tq, null, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true))); + TopDocs topDocs = searcher.search(tq, noDocs, new Sort(new SortField(field, SortField.Type.LONG, true))); if (topDocs.totalHits==0) continue; ScoreDoc[] sd = topDocs.scoreDocs; assertNotNull(sd); diff --git 
a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java index 1709a015c61..7a2d38d630e 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/TestCustomScoreQuery.java @@ -263,11 +263,11 @@ public class TestCustomScoreQuery extends FunctionTestSetup { log(q5CustomMulAdd); // do al the searches - TopDocs td1 = s.search(q1, null, 1000); - TopDocs td2CustomNeutral = s.search(q2CustomNeutral, null, 1000); - TopDocs td3CustomMul = s.search(q3CustomMul, null, 1000); - TopDocs td4CustomAdd = s.search(q4CustomAdd, null, 1000); - TopDocs td5CustomMulAdd = s.search(q5CustomMulAdd, null, 1000); + TopDocs td1 = s.search(q1, 1000); + TopDocs td2CustomNeutral = s.search(q2CustomNeutral, 1000); + TopDocs td3CustomMul = s.search(q3CustomMul, 1000); + TopDocs td4CustomAdd = s.search(q4CustomAdd, 1000); + TopDocs td5CustomMulAdd = s.search(q5CustomMulAdd, 1000); // put results in map so we can verify the scores although they have changed Map h1 = topDocsToMap(td1); diff --git a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java index f3503587a1e..3ccd8c2ec27 100644 --- a/lucene/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java +++ b/lucene/queries/src/test/org/apache/lucene/queries/function/TestFieldScoreQuery.java @@ -64,7 +64,7 @@ public class TestFieldScoreQuery extends FunctionTestSetup { IndexSearcher s = newSearcher(r); log("test: "+ functionQuery); QueryUtils.check(random(), functionQuery,s); - ScoreDoc[] h = s.search(functionQuery, null, 1000).scoreDocs; + ScoreDoc[] h = s.search(functionQuery, 1000).scoreDocs; assertEquals("All docs should be matched!",N_DOCS,h.length); String prevID = "ID"+(N_DOCS+1); // greater than all ids of docs in this test for (int i=0; i leaves = searcher.getTopReaderContext().leaves(); Assume.assumeTrue(leaves.size() == 1); Query q = parse("DuplicateFilterQuery.xml"); - int h = searcher.search(q, null, 1000).totalHits; + int h = searcher.search(q, 1000).totalHits; assertEquals("DuplicateFilterQuery should produce 1 result ", 1, h); } @@ -217,7 +217,7 @@ public class TestParser extends LuceneTestCase { if (VERBOSE) { System.out.println("TEST: query=" + q); } - TopDocs hits = searcher.search(q, null, numDocs); + TopDocs hits = searcher.search(q, numDocs); assertTrue(qType + " should produce results ", hits.totalHits > 0); if (VERBOSE) { System.out.println("=========" + qType + "============"); diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java index 6b2d52c514e..66d93e5e061 100644 --- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java +++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestQueryTemplateManager.java @@ -98,7 +98,7 @@ public class TestQueryTemplateManager extends LuceneTestCase { Query q = builder.getQuery(doc.getDocumentElement()); //Run the query - int h = searcher.search(q, null, 1000).totalHits; + int h = searcher.search(q, 1000).totalHits; //Check we have the expected number of results int expectedHits = Integer.parseInt(queryFormProperties.getProperty("expectedMatches")); diff --git 
a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java index 6ba5e5613e0..921dd1fcbaa 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/DuplicateFilterTest.java @@ -25,6 +25,7 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.*; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; @@ -84,7 +85,7 @@ public class DuplicateFilterTest extends LuceneTestCase { public void testDefaultFilter() throws Throwable { DuplicateFilter df = new DuplicateFilter(KEY_FIELD); HashSet results = new HashSet<>(); - ScoreDoc[] hits = searcher.search(tq, df, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new FilteredQuery(tq, df), 1000).scoreDocs; for (ScoreDoc hit : hits) { StoredDocument d = searcher.doc(hit.doc); @@ -96,7 +97,7 @@ public class DuplicateFilterTest extends LuceneTestCase { public void testNoFilter() throws Throwable { HashSet results = new HashSet<>(); - ScoreDoc[] hits = searcher.search(tq, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(tq, 1000).scoreDocs; assertTrue("Default searching should have found some matches", hits.length > 0); boolean dupsFound = false; @@ -114,7 +115,7 @@ public class DuplicateFilterTest extends LuceneTestCase { DuplicateFilter df = new DuplicateFilter(KEY_FIELD); df.setProcessingMode(DuplicateFilter.ProcessingMode.PM_FAST_INVALIDATION); HashSet results = new HashSet<>(); - ScoreDoc[] hits = searcher.search(tq, df, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new FilteredQuery(tq, df), 1000).scoreDocs; assertTrue("Filtered searching should have found some matches", hits.length > 0); for (ScoreDoc hit : hits) { @@ -129,7 +130,7 @@ public class DuplicateFilterTest extends LuceneTestCase { public void testKeepsLastFilter() throws Throwable { DuplicateFilter df = new DuplicateFilter(KEY_FIELD); df.setKeepMode(DuplicateFilter.KeepMode.KM_USE_LAST_OCCURRENCE); - ScoreDoc[] hits = searcher.search(tq, df, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new FilteredQuery(tq, df), 1000).scoreDocs; assertTrue("Filtered searching should have found some matches", hits.length > 0); for (ScoreDoc hit : hits) { StoredDocument d = searcher.doc(hit.doc); @@ -153,7 +154,7 @@ public class DuplicateFilterTest extends LuceneTestCase { public void testKeepsFirstFilter() throws Throwable { DuplicateFilter df = new DuplicateFilter(KEY_FIELD); df.setKeepMode(DuplicateFilter.KeepMode.KM_USE_FIRST_OCCURRENCE); - ScoreDoc[] hits = searcher.search(tq, df, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(new FilteredQuery(tq, df), 1000).scoreDocs; assertTrue("Filtered searching should have found some matches", hits.length > 0); for (ScoreDoc hit : hits) { StoredDocument d = searcher.doc(hit.doc); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery.java index 23b2d03cbb9..731d06099e2 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowFuzzyQuery.java @@ -60,32 +60,32 @@ 
public class TestSlowFuzzyQuery extends LuceneTestCase { writer.close(); SlowFuzzyQuery query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 0); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); // same with prefix query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 3); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(2, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 6); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // test scoring query = new SlowFuzzyQuery(new Term("field", "bbbbb"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("3 documents should match", 3, hits.length); List order = Arrays.asList("bbbbb","abbbb","aabbb"); for (int i = 0; i < hits.length; i++) { @@ -97,7 +97,7 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { // test pq size by supplying maxExpansions=2 // This query would normally return 3 documents, because 3 terms match (see above): query = new SlowFuzzyQuery(new Term("field", "bbbbb"), SlowFuzzyQuery.defaultMinSimilarity, 0, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("only 2 documents should match", 2, hits.length); order = Arrays.asList("bbbbb","abbbb"); for (int i = 0; i < hits.length; i++) { @@ -108,15 +108,15 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { // not similar enough: query = new SlowFuzzyQuery(new Term("field", "xxxxx"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new SlowFuzzyQuery(new Term("field", "aaccc"), SlowFuzzyQuery.defaultMinSimilarity, 0); // edit distance to "aaaaa" = 3 - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // query identical to a word in the index: query = new SlowFuzzyQuery(new Term("field", "aaaaa"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); 
assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); // default allows for up to two edits: @@ -125,7 +125,7 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { // query similar to a word in the index: query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); @@ -133,63 +133,63 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { // now with prefix query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 3); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); assertEquals(searcher.doc(hits[2].doc).get("field"), ("aaabb")); query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(2, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaa")); assertEquals(searcher.doc(hits[1].doc).get("field"), ("aaaab")); query = new SlowFuzzyQuery(new Term("field", "aaaac"), SlowFuzzyQuery.defaultMinSimilarity, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); // now with prefix query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 3); - hits = 
searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("ddddd")); query = new SlowFuzzyQuery(new Term("field", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // different field = no match: query = new SlowFuzzyQuery(new Term("anotherfield", "ddddX"), SlowFuzzyQuery.defaultMinSimilarity, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); reader.close(); @@ -210,11 +210,11 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { SlowFuzzyQuery query; query = new SlowFuzzyQuery(new Term("field", "abcxxxx"), 3f, 0); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new SlowFuzzyQuery(new Term("field", "abcxxxx"), 4f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); reader.close(); directory.close(); @@ -233,63 +233,63 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { SlowFuzzyQuery query; // not similar enough: query = new SlowFuzzyQuery(new Term("field", "xxxxx"), 0.5f, 0); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // edit distance to "aaaaaaa" = 3, this matches because the string is longer than // in testDefaultFuzziness so a bigger difference is allowed: query = new SlowFuzzyQuery(new Term("field", "aaaaccc"), 0.5f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaaaa")); // now with prefix query = new SlowFuzzyQuery(new Term("field", "aaaaccc"), 0.5f, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaaaa")); query = new SlowFuzzyQuery(new Term("field", "aaaaccc"), 0.5f, 4); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); assertEquals(searcher.doc(hits[0].doc).get("field"), ("aaaaaaa")); query = new SlowFuzzyQuery(new Term("field", "aaaaccc"), 0.5f, 5); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // no match, more than half of the characters is wrong: query = new SlowFuzzyQuery(new Term("field", "aaacccc"), 0.5f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // now with prefix query = new SlowFuzzyQuery(new Term("field", "aaacccc"), 0.5f, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // 
"student" and "stellent" are indeed similar to "segment" by default: query = new SlowFuzzyQuery(new Term("field", "student"), 0.5f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); query = new SlowFuzzyQuery(new Term("field", "stellent"), 0.5f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // now with prefix query = new SlowFuzzyQuery(new Term("field", "student"), 0.5f, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); query = new SlowFuzzyQuery(new Term("field", "stellent"), 0.5f, 1); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); query = new SlowFuzzyQuery(new Term("field", "student"), 0.5f, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); query = new SlowFuzzyQuery(new Term("field", "stellent"), 0.5f, 2); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // "student" doesn't match anymore thanks to increased minimum similarity: query = new SlowFuzzyQuery(new Term("field", "student"), 0.6f, 0); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); try { @@ -363,22 +363,22 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { Query query; // term not over 10 chars, so optimization shortcuts query = new SlowFuzzyQuery(new Term("field", "1234569"), 0.9f); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // 10 chars, so no optimization query = new SlowFuzzyQuery(new Term("field", "1234567891"), 0.9f); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); // over 10 chars, so no optimization query = new SlowFuzzyQuery(new Term("field", "12345678911"), 0.9f); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(1, hits.length); // over 10 chars, no match query = new SlowFuzzyQuery(new Term("field", "sdfsdfsdfsdf"), 0.9f); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals(0, hits.length); reader.close(); @@ -399,7 +399,7 @@ public class TestSlowFuzzyQuery extends LuceneTestCase { SlowFuzzyQuery query = new SlowFuzzyQuery(new Term("field", "lucene")); query.setRewriteMethod(new MultiTermQuery.TopTermsBoostOnlyBooleanQueryRewrite(50)); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals(3, hits.length); // normally, 'Lucenne' would be the first result as IDF will skew the score. 
assertEquals("Lucene", reader.document(hits[0].doc).get("field")); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestRegexQuery.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestRegexQuery.java index a2e517bf568..db51fc848a2 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestRegexQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestRegexQuery.java @@ -69,7 +69,7 @@ public class TestRegexQuery extends LuceneTestCase { if ( capability != null ) query.setRegexImplementation(capability); - return searcher.search(query, null, 1000).totalHits; + return searcher.search(query, 1000).totalHits; } private int spanRegexQueryNrHits(String regex1, String regex2, int slop, boolean ordered) throws Exception { @@ -77,7 +77,7 @@ public class TestRegexQuery extends LuceneTestCase { SpanQuery srq2 = new SpanMultiTermQueryWrapper<>(new RegexQuery(newTerm(regex2))); SpanNearQuery query = new SpanNearQuery( new SpanQuery[]{srq1, srq2}, slop, ordered); - return searcher.search(query, null, 1000).totalHits; + return searcher.search(query, 1000).totalHits; } public void testMatchAll() throws Exception { diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java index 940fe774a95..d91dc30686e 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/regex/TestSpanRegexQuery.java @@ -73,7 +73,7 @@ public class TestSpanRegexQuery extends LuceneTestCase { SpanFirstQuery sfq = new SpanFirstQuery(srq, 1); // SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {srq, stq}, 6, // true); - int numHits = searcher.search(sfq, null, 1000).totalHits; + int numHits = searcher.search(sfq, 1000).totalHits; assertEquals(1, numHits); reader.close(); directory.close(); diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java index 9e3a8ecc6b9..b51d5181dcd 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java +++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestTermAutomatonQuery.java @@ -575,15 +575,16 @@ public class TestTermAutomatonQuery extends LuceneTestCase { System.out.println(q.toDot()); } - Filter filter; + Query q1 = q; + Query q2 = bq; if (random().nextInt(5) == 1) { - filter = new RandomFilter(random().nextLong(), random().nextFloat()); - } else { - filter = null; + RandomFilter filter = new RandomFilter(random().nextLong(), random().nextFloat()); + q1 = new FilteredQuery(q1, filter); + q2 = new FilteredQuery(q2, filter); } - TopDocs hits1 = s.search(q, filter, numDocs); - TopDocs hits2 = s.search(bq, filter, numDocs); + TopDocs hits1 = s.search(q1, numDocs); + TopDocs hits2 = s.search(q2, numDocs); Set hits1Docs = toDocIDs(s, hits1); Set hits2Docs = toDocIDs(s, hits2); diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/SpatialExample.java b/lucene/spatial/src/test/org/apache/lucene/spatial/SpatialExample.java index 46d1e193125..cda0e87d66a 100644 --- a/lucene/spatial/src/test/org/apache/lucene/spatial/SpatialExample.java +++ b/lucene/spatial/src/test/org/apache/lucene/spatial/SpatialExample.java @@ -21,6 +21,7 @@ import com.spatial4j.core.context.SpatialContext; import 
com.spatial4j.core.distance.DistanceUtils; import com.spatial4j.core.shape.Point; import com.spatial4j.core.shape.Shape; + import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.IntField; @@ -33,6 +34,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.StoredDocument; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; @@ -152,7 +154,7 @@ public class SpatialExample extends LuceneTestCase { SpatialArgs args = new SpatialArgs(SpatialOperation.Intersects, ctx.makeCircle(-80.0, 33.0, DistanceUtils.dist2Degrees(200, DistanceUtils.EARTH_MEAN_RADIUS_KM))); Filter filter = strategy.makeFilter(args); - TopDocs docs = indexSearcher.search(new MatchAllDocsQuery(), filter, 10, idSort); + TopDocs docs = indexSearcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), 10, idSort); assertDocMatchedIds(indexSearcher, docs, 2); //Now, lets get the distance for the 1st doc via computing from stored point value: // (this computation is usually not redundant) diff --git a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java index 42f7524f8b1..7f75099e940 100644 --- a/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java +++ b/lucene/spatial/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java @@ -31,7 +31,9 @@ import com.spatial4j.core.shape.Rectangle; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.SpatialRelation; import com.spatial4j.core.shape.impl.RectangleImpl; + import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.spatial.StrategyTestCase; @@ -235,7 +237,7 @@ public class HeatmapFacetCounterTest extends StrategyTestCase { Filter filter = new IntersectsPrefixTreeFilter( pt, strategy.getFieldName(), grid, facetLevel, grid.getMaxLevels(), !strategy.isPointsOnly()); final TotalHitCountCollector collector = new TotalHitCountCollector(); - indexSearcher.search(new MatchAllDocsQuery(), filter, collector); + indexSearcher.search(new FilteredQuery(new MatchAllDocsQuery(), filter), collector); cellsValidated++; if (collector.getTotalHits() > 0) { cellValidatedNonZero++; diff --git a/lucene/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java b/lucene/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java index 285296b54bf..c2d7231e963 100644 --- a/lucene/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java +++ b/lucene/suggest/src/java/org/apache/lucene/search/spell/SpellChecker.java @@ -364,7 +364,7 @@ public class SpellChecker implements java.io.Closeable { int maxHits = 10 * numSug; // System.out.println("Q: " + query); - ScoreDoc[] hits = indexSearcher.search(query, null, maxHits).scoreDocs; + ScoreDoc[] hits = indexSearcher.search(query, maxHits).scoreDocs; // System.out.println("HITS: " + hits.length()); SuggestWordQueue sugQueue = new SuggestWordQueue(numSug, comparator); diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java 
b/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java index e4e0877024b..d7eeac78f02 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/CollationTestBase.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.StorableField; import org.apache.lucene.index.StoredDocument; import org.apache.lucene.index.Term; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; @@ -77,11 +78,11 @@ public abstract class CollationTestBase extends LuceneTestCase { // Collator (or an Arabic one for the case when Farsi searcher not // supported). ScoreDoc[] result = searcher.search - (query, new TermRangeFilter("content", firstBeg, firstEnd, true, true), 1).scoreDocs; + (new FilteredQuery(query, new TermRangeFilter("content", firstBeg, firstEnd, true, true)), 1).scoreDocs; assertEquals("The index Term should not be included.", 0, result.length); result = searcher.search - (query, new TermRangeFilter("content", secondBeg, secondEnd, true, true), 1).scoreDocs; + (new FilteredQuery(query, new TermRangeFilter("content", secondBeg, secondEnd, true, true)), 1).scoreDocs; assertEquals("The index Term should be included.", 1, result.length); reader.close(); @@ -106,11 +107,11 @@ public abstract class CollationTestBase extends LuceneTestCase { IndexSearcher searcher = new IndexSearcher(reader); Query query = new TermRangeQuery("content", firstBeg, firstEnd, true, true); - ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs; + ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs; assertEquals("The index Term should not be included.", 0, hits.length); query = new TermRangeQuery("content", secondBeg, secondEnd, true, true); - hits = searcher.search(query, null, 1000).scoreDocs; + hits = searcher.search(query, 1000).scoreDocs; assertEquals("The index Term should be included.", 1, hits.length); reader.close(); dir.close(); @@ -137,12 +138,12 @@ public abstract class CollationTestBase extends LuceneTestCase { // not supported). 
Query csrq = new TermRangeQuery("content", firstBeg, firstEnd, true, true); - ScoreDoc[] result = search.search(csrq, null, 1000).scoreDocs; + ScoreDoc[] result = search.search(csrq, 1000).scoreDocs; assertEquals("The index Term should not be included.", 0, result.length); csrq = new TermRangeQuery ("content", secondBeg, secondEnd, true, true); - result = search.search(csrq, null, 1000).scoreDocs; + result = search.search(csrq, 1000).scoreDocs; assertEquals("The index Term should be included.", 1, result.length); reader.close(); farsiIndex.close(); @@ -152,7 +153,7 @@ public abstract class CollationTestBase extends LuceneTestCase { // Copied from TestSort.java private void assertMatches(IndexSearcher searcher, Query query, Sort sort, String expectedResult) throws IOException { - ScoreDoc[] result = searcher.search(query, null, 1000, sort).scoreDocs; + ScoreDoc[] result = searcher.search(query, 1000, sort).scoreDocs; StringBuilder buff = new StringBuilder(10); int n = result.length; for (int i = 0 ; i < n ; ++i) { diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java index 427a85872ab..139f64face4 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/BaseDocValuesFormatTestCase.java @@ -111,7 +111,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -143,7 +143,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -176,7 +176,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -211,7 +211,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -248,7 +248,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new 
Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -285,7 +285,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -326,7 +326,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); BytesRef scratch = new BytesRef(); // Iterate through the results: @@ -488,7 +488,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); // Iterate through the results: for (int i = 0; i < hits.scoreDocs.length; i++) { @@ -592,7 +592,7 @@ public abstract class BaseDocValuesFormatTestCase extends BaseIndexFileFormatTes assertEquals(1, isearcher.search(new TermQuery(new Term("fieldname", longTerm)), 1).totalHits); Query query = new TermQuery(new Term("fieldname", "text")); - TopDocs hits = isearcher.search(query, null, 1); + TopDocs hits = isearcher.search(query, 1); assertEquals(1, hits.totalHits); BytesRef scratch = new BytesRef(); // Iterate through the results: diff --git a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java index ef87bb70ce1..e4d625f03b9 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/index/ThreadedIndexingAndSearchingTestCase.java @@ -665,9 +665,9 @@ public abstract class ThreadedIndexingAndSearchingTestCase extends LuceneTestCas private int runQuery(IndexSearcher s, Query q) throws Exception { s.search(q, 10); - int hitCount = s.search(q, null, 10, new Sort(new SortField("titleDV", SortField.Type.STRING))).totalHits; + int hitCount = s.search(q, 10, new Sort(new SortField("titleDV", SortField.Type.STRING))).totalHits; final Sort dvSort = new Sort(new SortField("titleDV", SortField.Type.STRING)); - int hitCount2 = s.search(q, null, 10, dvSort).totalHits; + int hitCount2 = s.search(q, 10, dvSort).totalHits; assertEquals(hitCount, hitCount2); return hitCount; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java index 1cced2508dc..09d490e35c7 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java +++ 
b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java @@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.Bits; -import org.apache.lucene.util.TestUtil; /** * Helper class that adds some extra checks to ensure correct @@ -97,13 +96,6 @@ public class AssertingIndexSearcher extends IndexSearcher { return rewritten; } - @Override - protected Query wrapFilter(Query query, Filter filter) { - if (random.nextBoolean()) - return super.wrapFilter(query, filter); - return (filter == null) ? query : new FilteredQuery(query, filter, TestUtil.randomFilterStrategy(random)); - } - @Override protected void search(List leaves, Weight weight, Collector collector) throws IOException { // TODO: shouldn't we AssertingCollector.wrap(collector) here? diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java index e6a22a055f7..70e93c173f7 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/CheckHits.java @@ -424,18 +424,17 @@ public class CheckHits { super(r); } protected void checkExplanations(Query q) throws IOException { - super.search(q, null, + super.search(q, new ExplanationAsserter (q, null, this)); } @Override public TopFieldDocs search(Query query, - Filter filter, int n, Sort sort) throws IOException { checkExplanations(query); - return super.search(query,filter,n,sort); + return super.search(query,n,sort); } @Override public void search(Query query, Collector results) throws IOException { @@ -443,16 +442,10 @@ public class CheckHits { super.search(query, results); } @Override - public void search(Query query, Filter filter, Collector results) throws IOException { - checkExplanations(query); - super.search(query, filter, results); - } - @Override - public TopDocs search(Query query, Filter filter, - int n) throws IOException { + public TopDocs search(Query query, int n) throws IOException { checkExplanations(query); - return super.search(query,filter, n); + return super.search(query, n); } } diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/SearchEquivalenceTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/SearchEquivalenceTestBase.java index 001903e8bda..325570bace9 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/SearchEquivalenceTestBase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/SearchEquivalenceTestBase.java @@ -180,11 +180,15 @@ public abstract class SearchEquivalenceTestBase extends LuceneTestCase { * Both queries will be filtered by filter */ protected void assertSubsetOf(Query q1, Query q2, Filter filter) throws Exception { + if (filter != null) { + q1 = new FilteredQuery(q1, filter); + q2 = new FilteredQuery(q2, filter); + } // we test both INDEXORDER and RELEVANCE because we want to test needsScores=true/false for (Sort sort : new Sort[] { Sort.INDEXORDER, Sort.RELEVANCE }) { // not efficient, but simple! 
- TopDocs td1 = s1.search(q1, filter, reader.maxDoc(), sort); - TopDocs td2 = s2.search(q2, filter, reader.maxDoc(), sort); + TopDocs td1 = s1.search(q1, reader.maxDoc(), sort); + TopDocs td2 = s2.search(q2, reader.maxDoc(), sort); assertTrue(td1.totalHits <= td2.totalHits); // fill the superset into a bitset @@ -222,8 +226,12 @@ public abstract class SearchEquivalenceTestBase extends LuceneTestCase { protected void assertSameScores(Query q1, Query q2, Filter filter) throws Exception { // not efficient, but simple! - TopDocs td1 = s1.search(q1, filter, reader.maxDoc()); - TopDocs td2 = s2.search(q2, filter, reader.maxDoc()); + if (filter != null) { + q1 = new FilteredQuery(q1, filter); + q2 = new FilteredQuery(q2, filter); + } + TopDocs td1 = s1.search(q1, reader.maxDoc()); + TopDocs td2 = s2.search(q2, reader.maxDoc()); assertEquals(td1.totalHits, td2.totalHits); for (int i = 0; i < td1.scoreDocs.length; ++i) { assertEquals(td1.scoreDocs[i].doc, td2.scoreDocs[i].doc); diff --git a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java index 4266d23cca3..3e470901bb2 100644 --- a/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java +++ b/solr/contrib/analytics/src/java/org/apache/solr/analytics/accumulator/FacetingAccumulator.java @@ -33,6 +33,7 @@ import java.util.TreeMap; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.Query; import org.apache.solr.analytics.accumulator.facet.FacetValueAccumulator; import org.apache.solr.analytics.accumulator.facet.FieldFacetAccumulator; @@ -611,7 +612,7 @@ public class FacetingAccumulator extends BasicAccumulator implements FacetValueA } // The searcher sends docIds to the QueryFacetAccumulator which forwards // them to collectQuery() in this class for collection. - searcher.search(q, filter, qAcc); + searcher.search(new FilteredQuery(q, filter), qAcc); computeQueryFacet(qfr.getName()); queryCount++; } @@ -715,7 +716,7 @@ public class FacetingAccumulator extends BasicAccumulator implements FacetValueA RangeFacetAccumulator rAcc = new RangeFacetAccumulator(this,rfr.getName(),facetValue); // The searcher sends docIds to the RangeFacetAccumulator which forwards // them to collectRange() in this class for collection. 
- searcher.search(q, filter, rAcc); + searcher.search(new FilteredQuery(q, filter), rAcc); computeRangeFacet(sf.getName()); } } diff --git a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java index 5b28a309161..7399e785a57 100644 --- a/solr/core/src/java/org/apache/solr/handler/BlobHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/BlobHandler.java @@ -119,7 +119,7 @@ public class BlobHandler extends RequestHandlerBase implements PluginInfoInitia } TopFieldDocs docs = req.getSearcher().search(new TermQuery(new Term("blobName", blobName)), - null, 1, new Sort(new SortField("version", SortField.Type.LONG, true))); + 1, new Sort(new SortField("version", SortField.Type.LONG, true))); long version = 0; if(docs.totalHits >0){ diff --git a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java index 69ae5f91ea1..8fc41bfaa8e 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/ExpandComponent.java @@ -40,6 +40,7 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryWrapperFilter; @@ -377,7 +378,11 @@ public class ExpandComponent extends SearchComponent implements PluginInfoInitia collector = groupExpandCollector; } - searcher.search(query, pfilter.filter, collector); + if (pfilter.filter == null) { + searcher.search(query, collector); + } else { + searcher.search(new FilteredQuery(query, pfilter.filter), collector); + } LongObjectMap groups = ((GroupCollector)groupExpandCollector).getGroups(); Map outMap = new HashMap<>(); CharsRefBuilder charsRef = new CharsRefBuilder(); diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java index eb535d5838a..687c65e5ab6 100644 --- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java +++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java @@ -48,6 +48,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Filter; import org.apache.lucene.search.FilterCollector; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -225,7 +226,7 @@ public class SimpleFacets { return; } AbstractAllGroupHeadsCollector allGroupHeadsCollector = grouping.getCommands().get(0).createAllGroupCollector(); - searcher.search(new MatchAllDocsQuery(), base.getTopFilter(), allGroupHeadsCollector); + searcher.search(new FilteredQuery(new MatchAllDocsQuery(), base.getTopFilter()), allGroupHeadsCollector); this.docs = new BitDocSet(allGroupHeadsCollector.retrieveGroupHeads(searcher.maxDoc())); } else { this.docs = base; @@ -325,7 +326,7 @@ public class SimpleFacets { TermAllGroupsCollector collector = new TermAllGroupsCollector(groupField); Filter mainQueryFilter = docs.getTopFilter(); // This returns a filter that only matches documents matching with q param and fq params - searcher.search(facetQuery, mainQueryFilter, 
collector); + searcher.search(new FilteredQuery(facetQuery, mainQueryFilter), collector); return collector.getGroupCount(); } @@ -495,7 +496,7 @@ public class SimpleFacets { if (sf != null && sf.hasDocValues() == false && sf.multiValued() == false && sf.getType().getNumericType() != null) { // it's a single-valued numeric field: we must currently create insanity :( // there isn't a GroupedFacetCollector that works on numerics right now... - searcher.search(new MatchAllDocsQuery(), base.getTopFilter(), new FilterCollector(collector) { + searcher.search(new FilteredQuery(new MatchAllDocsQuery(), base.getTopFilter()), new FilterCollector(collector) { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { LeafReader insane = Insanity.wrapInsanity(context.reader(), groupField); @@ -503,7 +504,7 @@ public class SimpleFacets { } }); } else { - searcher.search(new MatchAllDocsQuery(), base.getTopFilter(), collector); + searcher.search(new FilteredQuery(new MatchAllDocsQuery(), base.getTopFilter()), collector); } boolean orderByCount = sort.equals(FacetParams.FACET_SORT_COUNT) || sort.equals(FacetParams.FACET_SORT_COUNT_LEGACY); diff --git a/solr/core/src/java/org/apache/solr/search/Grouping.java b/solr/core/src/java/org/apache/solr/search/Grouping.java index 3d478a53b02..087bcca2dd1 100644 --- a/solr/core/src/java/org/apache/solr/search/Grouping.java +++ b/solr/core/src/java/org/apache/solr/search/Grouping.java @@ -35,6 +35,7 @@ import org.apache.lucene.queries.function.valuesource.QueryValueSource; import org.apache.lucene.search.CachingCollector; import org.apache.lucene.search.Collector; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; @@ -449,7 +450,11 @@ public class Grouping { collector = timeLimitingCollector; } try { - searcher.search(query, luceneFilter, collector); + Query q = query; + if (luceneFilter != null) { + q = new FilteredQuery(q, luceneFilter); + } + searcher.search(q, collector); } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) { logger.warn( "Query: " + query + "; " + x.getMessage() ); qr.setPartialResults(true); diff --git a/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java b/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java index 2e48ed9dae0..09ec1455b28 100644 --- a/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java +++ b/solr/core/src/java/org/apache/solr/search/LuceneQueryOptimizer.java @@ -110,7 +110,7 @@ if (c.query instanceof TermQuery) { queryOut[0] = query; filterOut[0] = filter; return null; } else { - return searcher.search(query, filter, numHits); + return searcher.search(new FilteredQuery(query, filter), numHits); } } diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java index 491df302473..53af8efc0a2 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java @@ -180,7 +180,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn * and postFilter as well as any needed collector wrappers for dealing with options * specified in the QueryCOmmand. 
*/ - private void buildAndRunCollectorChain(QueryResult qr, Query query, Filter luceneFilter, + private void buildAndRunCollectorChain(QueryResult qr, Query query, Collector collector, QueryCommand cmd, DelegatingCollector postFilter) throws IOException { final boolean terminateEarly = (cmd.getFlags() & TERMINATE_EARLY) == TERMINATE_EARLY; @@ -199,7 +199,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn } try { - super.search(query, luceneFilter, collector); + super.search(query, collector); if(collector instanceof DelegatingCollector) { ((DelegatingCollector)collector).finish(); } @@ -949,7 +949,11 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn collector = pf.postFilter; } - search(main, pf.filter, collector); + if (pf.filter != null) { + search(new FilteredQuery(main, pf.filter), collector); + } else { + search(main, collector); + } if(collector instanceof DelegatingCollector) { ((DelegatingCollector) collector).finish(); @@ -1229,10 +1233,10 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn try { if (filter == null) { - super.search(query, null, collector); + super.search(query, collector); } else { Filter luceneFilter = filter.getTopFilter(); - super.search(query, luceneFilter, collector); + super.search(new FilteredQuery(query, luceneFilter), collector); } } catch ( ExitableDirectoryReader.ExitingReaderException e) { log.warn("Query: " + query + "; " + e.getMessage()); @@ -1596,7 +1600,9 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn Query query = QueryUtils.makeQueryable(cmd.getQuery()); ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); - final Filter luceneFilter = pf.filter; + if (pf.filter != null) { + query = new FilteredQuery(query, pf.filter); + } // handle zero case... if (lastDocRequested<=0) { @@ -1638,7 +1644,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn }; } - buildAndRunCollectorChain(qr, query, luceneFilter, collector, cmd, pf.postFilter); + buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter); nDocsReturned=0; ids = new int[nDocsReturned]; @@ -1650,7 +1656,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn } else { final TopDocsCollector topCollector = buildTopDocsCollector(len, cmd); Collector collector = topCollector; - buildAndRunCollectorChain(qr, query, luceneFilter, collector, cmd, pf.postFilter); + buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter); totalHits = topCollector.getTotalHits(); TopDocs topDocs = topCollector.topDocs(0, len); @@ -1691,9 +1697,10 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn int smallSetSize = maxDoc>>6; ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList()); - final Filter luceneFilter = pf.filter; - Query query = QueryUtils.makeQueryable(cmd.getQuery()); + if (pf.filter != null) { + query = new FilteredQuery(query, pf.filter); + } // handle zero case... 
if (lastDocRequested<=0) { @@ -1729,7 +1736,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn collector = MultiCollector.wrap(setCollector, topScoreCollector); } - buildAndRunCollectorChain(qr, query, luceneFilter, collector, cmd, pf.postFilter); + buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter); set = setCollector.getDocSet(); @@ -1746,7 +1753,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn DocSetCollector setCollector = new DocSetCollector(maxDoc>>6, maxDoc); Collector collector = MultiCollector.wrap(topCollector, setCollector); - buildAndRunCollectorChain(qr, query, luceneFilter, collector, cmd, pf.postFilter); + buildAndRunCollectorChain(qr, query, collector, cmd, pf.postFilter); set = setCollector.getDocSet(); @@ -2075,7 +2082,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn BooleanQuery bq = new BooleanQuery(); bq.add(QueryUtils.makeQueryable(a), BooleanClause.Occur.MUST); bq.add(new ConstantScoreQuery(b.getTopFilter()), BooleanClause.Occur.MUST); - super.search(bq, null, collector); + super.search(bq, collector); return collector.getTotalHits(); } } diff --git a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java index 8bc96d3260d..465f0148554 100644 --- a/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java +++ b/solr/core/src/java/org/apache/solr/search/grouping/CommandHandler.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.ExitableDirectoryReader; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.Collector; import org.apache.lucene.search.Filter; +import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.TimeLimitingCollector; @@ -208,8 +209,8 @@ public class CommandHandler { * Invokes search with the specified filter and collector. 
   * If a time limit has been specified then wrap the collector in the TimeLimitingCollector
   */
-  private void searchWithTimeLimiter(final Query query,
-                                     final ProcessedFilter filter,
+  private void searchWithTimeLimiter(Query query,
+                                     ProcessedFilter filter,
                                      Collector collector) throws IOException {
     if (queryCommand.getTimeAllowed() > 0 ) {
       collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), queryCommand.getTimeAllowed());
@@ -220,14 +221,16 @@ public class CommandHandler {
       collector = MultiCollector.wrap(collector, hitCountCollector);
     }

-    Filter luceneFilter = filter.filter;
+    if (filter.filter != null) {
+      query = new FilteredQuery(query, filter.filter);
+    }
     if (filter.postFilter != null) {
       filter.postFilter.setLastDelegate(collector);
       collector = filter.postFilter;
     }

     try {
-      searcher.search(query, luceneFilter, collector);
+      searcher.search(query, collector);
     } catch (TimeLimitingCollector.TimeExceededException | ExitableDirectoryReader.ExitingReaderException x) {
       partialResults = true;
       logger.warn( "Query: " + query + "; " + x.getMessage() );
diff --git a/solr/core/src/test/org/apache/solr/search/TestSort.java b/solr/core/src/test/org/apache/solr/search/TestSort.java
index d45b10926d0..2763073f29c 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSort.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSort.java
@@ -40,6 +40,7 @@ import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilterCollector;
 import org.apache.lucene.search.FilterLeafCollector;
+import org.apache.lucene.search.FilteredQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.MatchAllDocsQuery;
@@ -52,8 +53,8 @@ import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.uninverting.UninvertingReader;
-import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.SolrTestCaseJ4;
@@ -295,7 +296,7 @@ public class TestSort extends SolrTestCaseJ4 {
      };

-      searcher.search(new MatchAllDocsQuery(), filt, myCollector);
+      searcher.search(new FilteredQuery(new MatchAllDocsQuery(), filt), myCollector);

      Collections.sort(collectedDocs, new Comparator() {
        @Override
diff --git a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
index c204d3715bc..0e2dcf91c30 100644
--- a/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
+++ b/solr/core/src/test/org/apache/solr/search/function/TestOrdValues.java
@@ -97,7 +97,7 @@ public class TestOrdValues extends LuceneTestCase {
     Query q = new FunctionQuery(vs);
     log("test: " + q);
     QueryUtils.check(random(), q, s);
-    ScoreDoc[] h = s.search(q, null, 1000).scoreDocs;
+    ScoreDoc[] h = s.search(q, 1000).scoreDocs;
     assertEquals("All docs should be matched!", N_DOCS, h.length);
     String prevID = inOrder
         ? "IE"   // greater than all ids of docs in this test ("ID0001", etc.)
@@ -145,7 +145,7 @@ public class TestOrdValues extends LuceneTestCase {
       vs = new ReverseOrdFieldSource(field);
     }
     Query q = new FunctionQuery(vs);
-    TopDocs td = s.search(q, null, 1000);
+    TopDocs td = s.search(q, 1000);
     assertEquals("All docs should be matched!", N_DOCS, td.totalHits);
     ScoreDoc sd[] = td.scoreDocs;
     for (int i = 0; i < sd.length; i++) {