diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index 6b2eb9b0643..89adfcc1228 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -71,8 +71,8 @@ API Changes * LUCENE-6217: Add IndexWriter.isOpen and getTragicException. (Simon Willnauer, Mike McCandless) -* LUCENE-6218: Add Collector.needsScores() and needsScores parameter - to Weight.scorer(). (Robert Muir) +* LUCENE-6218, LUCENE-6220: Add Collector.needsScores() and needsScores + parameter to Query.createWeight(). (Robert Muir, Adrien Grand) * LUCENE-4524: Merge DocsEnum and DocsAndPositionsEnum into a single PostingsEnum iterator. TermsEnum.docs() and TermsEnum.docsAndPositions() diff --git a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java index d3234556329..46563b7db58 100644 --- a/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/BooleanQuery.java @@ -174,15 +174,19 @@ public class BooleanQuery extends Query implements Iterable { protected ArrayList weights; protected int maxCoord; // num optional + num required private final boolean disableCoord; + private final boolean needsScores; - public BooleanWeight(IndexSearcher searcher, boolean disableCoord) + public BooleanWeight(IndexSearcher searcher, boolean needsScores, boolean disableCoord) throws IOException { + super(BooleanQuery.this); + this.needsScores = needsScores; this.similarity = searcher.getSimilarity(); this.disableCoord = disableCoord; weights = new ArrayList<>(clauses.size()); for (int i = 0 ; i < clauses.size(); i++) { BooleanClause c = clauses.get(i); - Weight w = c.getQuery().createWeight(searcher); + final boolean queryNeedsScores = needsScores && c.getOccur() != Occur.MUST_NOT; + Weight w = c.getQuery().createWeight(searcher, queryNeedsScores); weights.add(w); if (!c.isProhibited()) { maxCoord++; @@ -190,9 +194,6 @@ public class BooleanQuery extends Query implements Iterable { } } - @Override - public Query getQuery() { return BooleanQuery.this; } - @Override public float getValueForNormalization() throws IOException { float sum = 0.0f; @@ -242,7 +243,7 @@ public class BooleanQuery extends Query implements Iterable { for (Iterator wIter = weights.iterator(); wIter.hasNext();) { Weight w = wIter.next(); BooleanClause c = cIter.next(); - if (w.scorer(context, context.reader().getLiveDocs(), true) == null) { + if (w.scorer(context, context.reader().getLiveDocs()) == null) { if (c.isRequired()) { fail = true; Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")"); @@ -307,12 +308,12 @@ public class BooleanQuery extends Query implements Iterable { /** Try to build a boolean scorer for this weight. Returns null if {@link BooleanScorer} * cannot be used. 
*/ // pkg-private for forcing use of BooleanScorer in tests - BooleanScorer booleanScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + BooleanScorer booleanScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { List optional = new ArrayList(); Iterator cIter = clauses.iterator(); for (Weight w : weights) { BooleanClause c = cIter.next(); - BulkScorer subScorer = w.bulkScorer(context, acceptDocs, needsScores); + BulkScorer subScorer = w.bulkScorer(context, acceptDocs); if (subScorer == null) { if (c.isRequired()) { return null; @@ -342,8 +343,8 @@ public class BooleanQuery extends Query implements Iterable { } @Override - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { - final BooleanScorer bulkScorer = booleanScorer(context, acceptDocs, needsScores); + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + final BooleanScorer bulkScorer = booleanScorer(context, acceptDocs); if (bulkScorer != null) { // BooleanScorer is applicable // TODO: what is the right heuristic here? final long costThreshold; @@ -366,11 +367,11 @@ public class BooleanQuery extends Query implements Iterable { return bulkScorer; } } - return super.bulkScorer(context, acceptDocs, needsScores); + return super.bulkScorer(context, acceptDocs); } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { // initially the user provided value, // but if minNrShouldMatch == optional.size(), // we will optimize and move these to required, making this 0 @@ -382,7 +383,7 @@ public class BooleanQuery extends Query implements Iterable { Iterator cIter = clauses.iterator(); for (Weight w : weights) { BooleanClause c = cIter.next(); - Scorer subScorer = w.scorer(context, acceptDocs, needsScores && c.isProhibited() == false); + Scorer subScorer = w.scorer(context, acceptDocs); if (subScorer == null) { if (c.isRequired()) { return null; @@ -532,8 +533,8 @@ public class BooleanQuery extends Query implements Iterable { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new BooleanWeight(searcher, disableCoord); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new BooleanWeight(searcher, needsScores, disableCoord); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java index 9ba923e89b6..898b395f841 100644 --- a/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/ConstantScoreQuery.java @@ -110,12 +110,8 @@ public class ConstantScoreQuery extends Query { private float queryWeight; public ConstantWeight(IndexSearcher searcher) throws IOException { - this.innerWeight = (query == null) ? null : query.createWeight(searcher); - } - - @Override - public Query getQuery() { - return ConstantScoreQuery.this; + super(ConstantScoreQuery.this); + this.innerWeight = (query == null) ? 
null : query.createWeight(searcher, false); } @Override @@ -135,13 +131,13 @@ public class ConstantScoreQuery extends Query { } @Override - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { if (filter != null) { assert query == null; - return super.bulkScorer(context, acceptDocs, needsScores); + return super.bulkScorer(context, acceptDocs); } else { assert query != null && innerWeight != null; - BulkScorer bulkScorer = innerWeight.bulkScorer(context, acceptDocs, false); + BulkScorer bulkScorer = innerWeight.bulkScorer(context, acceptDocs); if (bulkScorer == null) { return null; } @@ -150,7 +146,7 @@ public class ConstantScoreQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { if (filter != null) { assert query == null; final DocIdSet dis = filter.getDocIdSet(context, acceptDocs); @@ -163,7 +159,7 @@ public class ConstantScoreQuery extends Query { return new ConstantDocIdSetIteratorScorer(disi, this, queryWeight); } else { assert query != null && innerWeight != null; - Scorer scorer = innerWeight.scorer(context, acceptDocs, false); + Scorer scorer = innerWeight.scorer(context, acceptDocs); if (scorer == null) { return null; } @@ -175,7 +171,7 @@ public class ConstantScoreQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - final Scorer cs = scorer(context, context.reader().getLiveDocs(), true); + final Scorer cs = scorer(context, context.reader().getLiveDocs()); final boolean exists = (cs != null && cs.advance(doc) == doc); final ComplexExplanation result = new ComplexExplanation(); @@ -331,7 +327,7 @@ public class ConstantScoreQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new ConstantScoreQuery.ConstantWeight(searcher); } diff --git a/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java b/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java index a63d61c3895..cfa19add59f 100644 --- a/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/DisjunctionMaxQuery.java @@ -118,16 +118,13 @@ public class DisjunctionMaxQuery extends Query implements Iterable { protected ArrayList weights = new ArrayList<>(); // The Weight's for our subqueries, in 1-1 correspondence with disjuncts /** Construct the Weight for this Query searched by searcher. Recursively construct subquery weights. */ - public DisjunctionMaxWeight(IndexSearcher searcher) throws IOException { + public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + super(DisjunctionMaxQuery.this); for (Query disjunctQuery : disjuncts) { - weights.add(disjunctQuery.createWeight(searcher)); + weights.add(disjunctQuery.createWeight(searcher, needsScores)); } } - /** Return our associated DisjunctionMaxQuery */ - @Override - public Query getQuery() { return DisjunctionMaxQuery.this; } - /** Compute the sub of squared weights of us applied to our subqueries. Used for normalization. 
*/ @Override public float getValueForNormalization() throws IOException { @@ -153,11 +150,11 @@ public class DisjunctionMaxQuery extends Query implements Iterable { /** Create the scorer used to score our associated DisjunctionMaxQuery */ @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { List scorers = new ArrayList<>(); for (Weight w : weights) { // we will advance() subscorers - Scorer subScorer = w.scorer(context, acceptDocs, needsScores); + Scorer subScorer = w.scorer(context, acceptDocs); if (subScorer != null) { scorers.add(subScorer); } @@ -197,8 +194,8 @@ public class DisjunctionMaxQuery extends Query implements Iterable { /** Create the Weight used to score us */ @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new DisjunctionMaxWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new DisjunctionMaxWeight(searcher, needsScores); } /** Optimize our representation and our subqueries representations diff --git a/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java b/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java index d2740e60cdd..bf6a8b7f55c 100644 --- a/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/FilteredQuery.java @@ -78,9 +78,9 @@ public class FilteredQuery extends Query { * This is accomplished by overriding the Scorer returned by the Weight. */ @Override - public Weight createWeight(final IndexSearcher searcher) throws IOException { - final Weight weight = query.createWeight (searcher); - return new Weight() { + public Weight createWeight(final IndexSearcher searcher, boolean needsScores) throws IOException { + final Weight weight = query.createWeight (searcher, needsScores); + return new Weight(FilteredQuery.this) { @Override public float getValueForNormalization() throws IOException { @@ -111,15 +111,9 @@ public class FilteredQuery extends Query { } } - // return this query - @Override - public Query getQuery() { - return FilteredQuery.this; - } - // return a filtering scorer @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assert filter != null; DocIdSet filterDocIdSet = filter.getDocIdSet(context, acceptDocs); @@ -128,12 +122,12 @@ public class FilteredQuery extends Query { return null; } - return strategy.filteredScorer(context, weight, filterDocIdSet, needsScores); + return strategy.filteredScorer(context, weight, filterDocIdSet); } // return a filtering top scorer @Override - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assert filter != null; DocIdSet filterDocIdSet = filter.getDocIdSet(context, acceptDocs); @@ -443,7 +437,7 @@ public class FilteredQuery extends Query { * @throws IOException if an {@link IOException} occurs */ public abstract Scorer filteredScorer(LeafReaderContext context, - Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException; + Weight weight, DocIdSet docIdSet) throws IOException; /** * Returns a filtered {@link BulkScorer} based on this @@ -459,7 
+453,7 @@ public class FilteredQuery extends Query { */ public BulkScorer filteredBulkScorer(LeafReaderContext context, Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException { - Scorer scorer = filteredScorer(context, weight, docIdSet, needsScores); + Scorer scorer = filteredScorer(context, weight, docIdSet); if (scorer == null) { return null; } @@ -481,7 +475,7 @@ public class FilteredQuery extends Query { public static class RandomAccessFilterStrategy extends FilterStrategy { @Override - public Scorer filteredScorer(LeafReaderContext context, Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException { + public Scorer filteredScorer(LeafReaderContext context, Weight weight, DocIdSet docIdSet) throws IOException { final DocIdSetIterator filterIter = docIdSet.iterator(); if (filterIter == null) { // this means the filter does not accept any documents. @@ -493,11 +487,11 @@ public class FilteredQuery extends Query { final boolean useRandomAccess = filterAcceptDocs != null && useRandomAccess(filterAcceptDocs, filterIter.cost()); if (useRandomAccess) { // if we are using random access, we return the inner scorer, just with other acceptDocs - return weight.scorer(context, filterAcceptDocs, needsScores); + return weight.scorer(context, filterAcceptDocs); } else { // we are gonna advance() this scorer, so we set inorder=true/toplevel=false // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice - final Scorer scorer = weight.scorer(context, null, needsScores); + final Scorer scorer = weight.scorer(context, null); return (scorer == null) ? null : new LeapFrogScorer(weight, filterIter, scorer, scorer); } } @@ -530,14 +524,14 @@ public class FilteredQuery extends Query { @Override public Scorer filteredScorer(LeafReaderContext context, - Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException { + Weight weight, DocIdSet docIdSet) throws IOException { final DocIdSetIterator filterIter = docIdSet.iterator(); if (filterIter == null) { // this means the filter does not accept any documents. return null; } // we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice - final Scorer scorer = weight.scorer(context, null, needsScores); + final Scorer scorer = weight.scorer(context, null); if (scorer == null) { return null; } @@ -566,14 +560,14 @@ public class FilteredQuery extends Query { private static final class QueryFirstFilterStrategy extends FilterStrategy { @Override public Scorer filteredScorer(final LeafReaderContext context, - Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException { + Weight weight, DocIdSet docIdSet) throws IOException { Bits filterAcceptDocs = docIdSet.bits(); if (filterAcceptDocs == null) { // Filter does not provide random-access Bits; we // must fallback to leapfrog: - return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredScorer(context, weight, docIdSet, needsScores); + return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredScorer(context, weight, docIdSet); } - final Scorer scorer = weight.scorer(context, null, needsScores); + final Scorer scorer = weight.scorer(context, null); return scorer == null ? 
null : new QueryFirstScorer(weight, filterAcceptDocs, scorer); } @@ -586,7 +580,7 @@ public class FilteredQuery extends Query { // must fallback to leapfrog: return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredBulkScorer(context, weight, docIdSet, needsScores); } - final Scorer scorer = weight.scorer(context, null, needsScores); + final Scorer scorer = weight.scorer(context, null); return scorer == null ? null : new QueryFirstBulkScorer(scorer, filterAcceptDocs); } } diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java index 474528e7fcf..596ede4784f 100644 --- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java +++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java @@ -226,8 +226,53 @@ public class IndexSearcher { * @throws BooleanQuery.TooManyClauses If a query would exceed * {@link BooleanQuery#getMaxClauseCount()} clauses. */ - public TopDocs searchAfter(ScoreDoc after, Query query, int n) throws IOException { - return search(createNormalizedWeight(query), after, n); + public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException { + final int limit = Math.max(1, reader.maxDoc()); + if (after != null && after.doc >= limit) { + throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + + after.doc + " limit=" + limit); + } + numHits = Math.min(numHits, limit); + + if (executor == null) { + final TopScoreDocCollector collector = TopScoreDocCollector.create(numHits, after); + search(query, collector); + return collector.topDocs(); + } else { + final TopScoreDocCollector[] collectors = new TopScoreDocCollector[leafSlices.length]; + boolean needsScores = false; + for (int i = 0; i < leafSlices.length; ++i) { + collectors[i] = TopScoreDocCollector.create(numHits, after); + needsScores |= collectors[i].needsScores(); + } + + final Weight weight = createNormalizedWeight(query, needsScores); + final List> topDocsFutures = new ArrayList<>(leafSlices.length); + for (int i = 0; i < leafSlices.length; ++i) { + final LeafReaderContext[] leaves = leafSlices[i].leaves; + final TopScoreDocCollector collector = collectors[i]; + topDocsFutures.add(executor.submit(new Callable() { + @Override + public TopDocs call() throws Exception { + search(Arrays.asList(leaves), weight, collector); + return collector.topDocs(); + } + })); + } + + final TopDocs[] topDocs = new TopDocs[leafSlices.length]; + for (int i = 0; i < topDocs.length; ++i) { + try { + topDocs[i] = topDocsFutures.get(i).get(); + } catch (InterruptedException e) { + throw new ThreadInterruptedException(e); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + + return TopDocs.merge(numHits, topDocs); + } } /** Finds the top n @@ -242,7 +287,7 @@ public class IndexSearcher { * {@link BooleanQuery#getMaxClauseCount()} clauses. 
*/ public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException { - return search(createNormalizedWeight(wrapFilter(query, filter)), after, n); + return searchAfter(after, wrapFilter(query, filter), n); } /** Finds the top n @@ -253,7 +298,7 @@ public class IndexSearcher { */ public TopDocs search(Query query, int n) throws IOException { - return search(query, null, n); + return searchAfter(null, query, n); } @@ -265,7 +310,7 @@ public class IndexSearcher { */ public TopDocs search(Query query, Filter filter, int n) throws IOException { - return search(createNormalizedWeight(wrapFilter(query, filter)), null, n); + return search(wrapFilter(query, filter), n); } /** Lower-level search API. @@ -281,7 +326,7 @@ public class IndexSearcher { */ public void search(Query query, Filter filter, Collector results) throws IOException { - search(leafContexts, createNormalizedWeight(wrapFilter(query, filter)), results); + search(wrapFilter(query, filter), results); } /** Lower-level search API. @@ -293,7 +338,7 @@ public class IndexSearcher { */ public void search(Query query, Collector results) throws IOException { - search(leafContexts, createNormalizedWeight(query), results); + search(leafContexts, createNormalizedWeight(query, results.needsScores()), results); } /** Search implementation with arbitrary sorting. Finds @@ -310,7 +355,7 @@ public class IndexSearcher { */ public TopFieldDocs search(Query query, Filter filter, int n, Sort sort) throws IOException { - return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, false, false); + return search(query, filter, n, sort, false, false); } /** Search implementation with arbitrary sorting, plus @@ -329,7 +374,7 @@ public class IndexSearcher { */ public TopFieldDocs search(Query query, Filter filter, int n, Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException { - return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, doDocScores, doMaxScore); + return searchAfter(null, query, filter, n, sort, doDocScores, doMaxScore); } /** Finds the top n @@ -343,13 +388,8 @@ public class IndexSearcher { * @throws BooleanQuery.TooManyClauses If a query would exceed * {@link BooleanQuery#getMaxClauseCount()} clauses. */ - public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException { - if (after != null && !(after instanceof FieldDoc)) { - // TODO: if we fix type safety of TopFieldDocs we can - // remove this - throw new IllegalArgumentException("after must be a FieldDoc; got " + after); - } - return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true, false, false); + public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException { + return searchAfter(after, query, filter, n, sort, false, false); } /** @@ -362,7 +402,7 @@ public class IndexSearcher { */ public TopFieldDocs search(Query query, int n, Sort sort) throws IOException { - return search(createNormalizedWeight(query), n, sort, false, false); + return search(query, null, n, sort, false, false); } /** Finds the top n @@ -377,12 +417,7 @@ public class IndexSearcher { * {@link BooleanQuery#getMaxClauseCount()} clauses. 
*/ public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException { - if (after != null && !(after instanceof FieldDoc)) { - // TODO: if we fix type safety of TopFieldDocs we can - // remove this - throw new IllegalArgumentException("after must be a FieldDoc; got " + after); - } - return search(createNormalizedWeight(query), (FieldDoc) after, n, sort, true, false, false); + return searchAfter(after, query, null, n, sort, false, false); } /** Finds the top n @@ -401,123 +436,54 @@ public class IndexSearcher { * @throws BooleanQuery.TooManyClauses If a query would exceed * {@link BooleanQuery#getMaxClauseCount()} clauses. */ - public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort, - boolean doDocScores, boolean doMaxScore) throws IOException { + public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int numHits, Sort sort, + boolean doDocScores, boolean doMaxScore) throws IOException { if (after != null && !(after instanceof FieldDoc)) { // TODO: if we fix type safety of TopFieldDocs we can // remove this throw new IllegalArgumentException("after must be a FieldDoc; got " + after); } - return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true, - doDocScores, doMaxScore); + return searchAfter((FieldDoc) after, wrapFilter(query, filter), numHits, sort, doDocScores, doMaxScore); } - /** Expert: Low-level search implementation. Finds the top n - * hits for query, applying filter if non-null. - * - *

Applications should usually call {@link IndexSearcher#search(Query,int)} or - * {@link IndexSearcher#search(Query,Filter,int)} instead. - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - protected TopDocs search(Weight weight, ScoreDoc after, int nDocs) throws IOException { - int limit = reader.maxDoc(); - if (limit == 0) { - limit = 1; - } + private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort sort, + boolean doDocScores, boolean doMaxScore) throws IOException { + final int limit = Math.max(1, reader.maxDoc()); if (after != null && after.doc >= limit) { throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + after.doc + " limit=" + limit); } - nDocs = Math.min(nDocs, limit); - + numHits = Math.min(numHits, limit); + + final boolean fillFields = true; if (executor == null) { - return search(leafContexts, weight, after, nDocs); + final TopFieldCollector collector = TopFieldCollector.create(sort, numHits, after, fillFields, doDocScores, doMaxScore); + search(query, collector); + return collector.topDocs(); } else { - final List> topDocsFutures = new ArrayList<>(leafSlices.length); - for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice - topDocsFutures.add(executor.submit(new SearcherCallableNoSort(this, leafSlices[i], weight, after, nDocs))); + final TopFieldCollector[] collectors = new TopFieldCollector[leafSlices.length]; + boolean needsScores = false; + for (int i = 0; i < leafSlices.length; ++i) { + collectors[i] = TopFieldCollector.create(sort, numHits, after, fillFields, doDocScores, doMaxScore); + needsScores |= collectors[i].needsScores(); } - final TopDocs[] topDocs = new TopDocs[leafSlices.length]; - for (int i = 0; i < leafSlices.length; i++) { - try { - topDocs[i] = topDocsFutures.get(i).get(); - } catch (InterruptedException e) { - throw new ThreadInterruptedException(e); - } catch (ExecutionException e) { - throw new RuntimeException(e); - } - } - return TopDocs.merge(null, nDocs, topDocs); - } - } - /** Expert: Low-level search implementation. Finds the top n - * hits for query. - * - *

Applications should usually call {@link IndexSearcher#search(Query,int)} or - * {@link IndexSearcher#search(Query,Filter,int)} instead. - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - protected TopDocs search(List leaves, Weight weight, ScoreDoc after, int nDocs) throws IOException { - // single thread - int limit = reader.maxDoc(); - if (limit == 0) { - limit = 1; - } - nDocs = Math.min(nDocs, limit); - TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, after); - search(leaves, weight, collector); - return collector.topDocs(); - } - - /** Expert: Low-level search implementation with arbitrary - * sorting and control over whether hit scores and max - * score should be computed. Finds - * the top n hits for query and sorting the hits - * by the criteria in sort. - * - *

Applications should usually call {@link - * IndexSearcher#search(Query,Filter,int,Sort)} instead. - * - * @throws BooleanQuery.TooManyClauses If a query would exceed - * {@link BooleanQuery#getMaxClauseCount()} clauses. - */ - protected TopFieldDocs search(Weight weight, - final int nDocs, Sort sort, - boolean doDocScores, boolean doMaxScore) throws IOException { - return search(weight, null, nDocs, sort, true, doDocScores, doMaxScore); - } - - /** - * Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose - * whether or not the fields in the returned {@link FieldDoc} instances should - * be set by specifying fillFields. - */ - protected TopFieldDocs search(Weight weight, FieldDoc after, int nDocs, - Sort sort, boolean fillFields, - boolean doDocScores, boolean doMaxScore) - throws IOException { - - if (sort == null) throw new NullPointerException("Sort must not be null"); - - int limit = reader.maxDoc(); - if (limit == 0) { - limit = 1; - } - nDocs = Math.min(nDocs, limit); - - if (executor == null) { - // use all leaves here! - return search(leafContexts, weight, after, nDocs, sort, fillFields, doDocScores, doMaxScore); - } else { + final Weight weight = createNormalizedWeight(query, needsScores); final List> topDocsFutures = new ArrayList<>(leafSlices.length); - for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice - topDocsFutures.add(executor.submit(new SearcherCallableWithSort(this, leafSlices[i], weight, after, nDocs, sort, doDocScores, doMaxScore))); + for (int i = 0; i < leafSlices.length; ++i) { + final LeafReaderContext[] leaves = leafSlices[i].leaves; + final TopFieldCollector collector = collectors[i]; + topDocsFutures.add(executor.submit(new Callable() { + @Override + public TopFieldDocs call() throws Exception { + search(Arrays.asList(leaves), weight, collector); + return collector.topDocs(); + } + })); } + final TopFieldDocs[] topDocs = new TopFieldDocs[leafSlices.length]; - for (int i = 0; i < leafSlices.length; i++) { + for (int i = 0; i < topDocs.length; ++i) { try { topDocs[i] = topDocsFutures.get(i).get(); } catch (InterruptedException e) { @@ -526,30 +492,9 @@ public class IndexSearcher { throw new RuntimeException(e); } } - return (TopFieldDocs) TopDocs.merge(sort, nDocs, topDocs); - } - } - - - /** - * Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose - * whether or not the fields in the returned {@link FieldDoc} instances should - * be set by specifying fillFields. - */ - protected TopFieldDocs search(List leaves, Weight weight, FieldDoc after, int nDocs, - Sort sort, boolean fillFields, boolean doDocScores, boolean doMaxScore) throws IOException { - // single thread - int limit = reader.maxDoc(); - if (limit == 0) { - limit = 1; - } - nDocs = Math.min(nDocs, limit); - TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, after, - fillFields, doDocScores, - doMaxScore); - search(leaves, weight, collector); - return (TopFieldDocs) collector.topDocs(); + return TopDocs.merge(sort, numHits, topDocs); + } } /** @@ -586,7 +531,7 @@ public class IndexSearcher { // continue with the following leaf continue; } - BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs(), collector.needsScores()); + BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs()); if (scorer != null) { try { scorer.score(leafCollector); @@ -620,7 +565,7 @@ public class IndexSearcher { * entire index. 
*/ public Explanation explain(Query query, int doc) throws IOException { - return explain(createNormalizedWeight(query), doc); + return explain(createNormalizedWeight(query, true), doc); } /** Expert: low-level implementation method @@ -650,9 +595,9 @@ public class IndexSearcher { * can then directly be used to get a {@link Scorer}. * @lucene.internal */ - public Weight createNormalizedWeight(Query query) throws IOException { + public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { query = rewrite(query); - Weight weight = query.createWeight(this); + Weight weight = query.createWeight(this, needsScores); float v = weight.getValueForNormalization(); float norm = getSimilarity().queryNorm(v); if (Float.isInfinite(norm) || Float.isNaN(norm)) { @@ -671,68 +616,6 @@ public class IndexSearcher { return readerContext; } - /** - * A thread subclass for searching a single searchable - */ - private static final class SearcherCallableNoSort implements Callable { - - private final IndexSearcher searcher; - private final Weight weight; - private final ScoreDoc after; - private final int nDocs; - private final LeafSlice slice; - - public SearcherCallableNoSort(IndexSearcher searcher, LeafSlice slice, Weight weight, - ScoreDoc after, int nDocs) { - this.searcher = searcher; - this.weight = weight; - this.after = after; - this.nDocs = nDocs; - this.slice = slice; - } - - @Override - public TopDocs call() throws IOException { - return searcher.search(Arrays.asList(slice.leaves), weight, after, nDocs); - } - } - - - /** - * A thread subclass for searching a single searchable - */ - private static final class SearcherCallableWithSort implements Callable { - - private final IndexSearcher searcher; - private final Weight weight; - private final int nDocs; - private final Sort sort; - private final LeafSlice slice; - private final FieldDoc after; - private final boolean doDocScores; - private final boolean doMaxScore; - - public SearcherCallableWithSort(IndexSearcher searcher, LeafSlice slice, Weight weight, - FieldDoc after, int nDocs, Sort sort, - boolean doDocScores, boolean doMaxScore) { - this.searcher = searcher; - this.weight = weight; - this.nDocs = nDocs; - this.sort = sort; - this.slice = slice; - this.after = after; - this.doDocScores = doDocScores; - this.doMaxScore = doMaxScore; - } - - @Override - public TopFieldDocs call() throws IOException { - assert slice.leaves.length == 1; - return searcher.search(Arrays.asList(slice.leaves), - weight, after, nDocs, sort, true, doDocScores, doMaxScore); - } - } - /** * A class holding a subset of the {@link IndexSearcher}s leaf contexts to be * executed within a single thread. 
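Taken together, the IndexSearcher changes above mean that whether scores are needed is now decided once, from the collector, at Weight creation time instead of being passed to every scorer(...) and bulkScorer(...) call. Below is a rough usage sketch of that flow; it is not part of the patch, it assumes an existing Directory ("dir") and an already-built Query, and it only uses methods that appear in this diff (createNormalizedWeight(Query, boolean), Collector.needsScores(), Weight.bulkScorer(LeafReaderContext, Bits)).

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;

class NeedsScoresExample {                        // hypothetical helper, not in the patch
  static TopDocs topTen(Directory dir, Query query) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      TopScoreDocCollector collector = TopScoreDocCollector.create(10, null);
      // needsScores is derived from the collector once, up front ...
      Weight weight = searcher.createNormalizedWeight(query, collector.needsScores());
      for (LeafReaderContext ctx : reader.leaves()) {
        // ... so the per-leaf calls no longer take a needsScores argument.
        BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs());
        if (scorer != null) {
          scorer.score(collector.getLeafCollector(ctx));
        }
      }
      return collector.topDocs();
    }
  }
}

The same flag is what BooleanWeight consults above to skip scoring work for MUST_NOT clauses (queryNeedsScores = needsScores && c.getOccur() != Occur.MUST_NOT).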
diff --git a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java index aee68872d14..8817abc0eed 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/MatchAllDocsQuery.java @@ -110,6 +110,7 @@ public class MatchAllDocsQuery extends Query { private float queryNorm; public MatchAllDocsWeight(IndexSearcher searcher) { + super(MatchAllDocsQuery.this); } @Override @@ -117,11 +118,6 @@ public class MatchAllDocsQuery extends Query { return "weight(" + MatchAllDocsQuery.this + ")"; } - @Override - public Query getQuery() { - return MatchAllDocsQuery.this; - } - @Override public float getValueForNormalization() { queryWeight = getBoost(); @@ -135,7 +131,7 @@ public class MatchAllDocsQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { return new MatchAllScorer(context.reader(), acceptDocs, this, queryWeight); } @@ -154,7 +150,7 @@ public class MatchAllDocsQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) { return new MatchAllDocsWeight(searcher); } diff --git a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java index 4061e045ef2..e515e39e5c6 100644 --- a/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/MultiPhraseQuery.java @@ -141,9 +141,12 @@ public class MultiPhraseQuery extends Query { private final Similarity similarity; private final Similarity.SimWeight stats; private final Map termContexts = new HashMap<>(); + private final boolean needsScores; - public MultiPhraseWeight(IndexSearcher searcher) + public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + super(MultiPhraseQuery.this); + this.needsScores = needsScores; this.similarity = searcher.getSimilarity(); final IndexReaderContext context = searcher.getTopReaderContext(); @@ -164,9 +167,6 @@ public class MultiPhraseQuery extends Query { allTermStats.toArray(new TermStatistics[allTermStats.size()])); } - @Override - public Query getQuery() { return MultiPhraseQuery.this; } - @Override public float getValueForNormalization() { return stats.getValueForNormalization(); @@ -178,7 +178,7 @@ public class MultiPhraseQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assert !termArrays.isEmpty(); final LeafReader reader = context.reader(); final Bits liveDocs = acceptDocs; @@ -256,7 +256,7 @@ public class MultiPhraseQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Scorer scorer = scorer(context, context.reader().getLiveDocs(), true); + Scorer scorer = scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { @@ -296,8 +296,8 @@ public class MultiPhraseQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new 
MultiPhraseWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new MultiPhraseWeight(searcher, needsScores); } /** Prints a user-readable version of this query. */ diff --git a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java index e28f8295960..55c47207c6a 100644 --- a/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/PhraseQuery.java @@ -211,10 +211,13 @@ public class PhraseQuery extends Query { private class PhraseWeight extends Weight { private final Similarity similarity; private final Similarity.SimWeight stats; + private final boolean needsScores; private transient TermContext states[]; - public PhraseWeight(IndexSearcher searcher) + public PhraseWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + super(PhraseQuery.this); + this.needsScores = needsScores; this.similarity = searcher.getSimilarity(); final IndexReaderContext context = searcher.getTopReaderContext(); states = new TermContext[terms.size()]; @@ -230,9 +233,6 @@ public class PhraseQuery extends Query { @Override public String toString() { return "weight(" + PhraseQuery.this + ")"; } - @Override - public Query getQuery() { return PhraseQuery.this; } - @Override public float getValueForNormalization() { return stats.getValueForNormalization(); @@ -244,7 +244,7 @@ public class PhraseQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assert !terms.isEmpty(); final LeafReader reader = context.reader(); final Bits liveDocs = acceptDocs; @@ -297,7 +297,7 @@ public class PhraseQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Scorer scorer = scorer(context, context.reader().getLiveDocs(), true); + Scorer scorer = scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { @@ -318,8 +318,8 @@ public class PhraseQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new PhraseWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new PhraseWeight(searcher, needsScores); } /** diff --git a/lucene/core/src/java/org/apache/lucene/search/Query.java b/lucene/core/src/java/org/apache/lucene/search/Query.java index 92dc692c7d6..cd03ccb9f48 100644 --- a/lucene/core/src/java/org/apache/lucene/search/Query.java +++ b/lucene/core/src/java/org/apache/lucene/search/Query.java @@ -18,7 +18,6 @@ package org.apache.lucene.search; */ import java.io.IOException; - import java.util.Set; import org.apache.lucene.index.IndexReader; @@ -72,11 +71,13 @@ public abstract class Query implements Cloneable { /** * Expert: Constructs an appropriate Weight implementation for this query. - * *

* Only implemented by primitive queries, which re-write to themselves. + * + * @param needsScores True if document scores ({@link Scorer#score}) or match + * frequencies ({@link Scorer#freq}) are needed. */ - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { throw new UnsupportedOperationException("Query " + this + " does not implement createWeight"); } diff --git a/lucene/core/src/java/org/apache/lucene/search/QueryRescorer.java b/lucene/core/src/java/org/apache/lucene/search/QueryRescorer.java index e659868e9c5..b01668d9a86 100644 --- a/lucene/core/src/java/org/apache/lucene/search/QueryRescorer.java +++ b/lucene/core/src/java/org/apache/lucene/search/QueryRescorer.java @@ -61,7 +61,7 @@ public abstract class QueryRescorer extends Rescorer { List leaves = searcher.getIndexReader().leaves(); - Weight weight = searcher.createNormalizedWeight(query); + Weight weight = searcher.createNormalizedWeight(query, true); // Now merge sort docIDs from hits, with reader's leaves: int hitUpto = 0; @@ -83,7 +83,7 @@ public abstract class QueryRescorer extends Rescorer { if (readerContext != null) { // We advanced to another segment: docBase = readerContext.docBase; - scorer = weight.scorer(readerContext, null, true); + scorer = weight.scorer(readerContext, null); } if(scorer != null) { diff --git a/lucene/core/src/java/org/apache/lucene/search/QueryWrapperFilter.java b/lucene/core/src/java/org/apache/lucene/search/QueryWrapperFilter.java index 0b7a599b413..4756d2cd55f 100644 --- a/lucene/core/src/java/org/apache/lucene/search/QueryWrapperFilter.java +++ b/lucene/core/src/java/org/apache/lucene/search/QueryWrapperFilter.java @@ -53,11 +53,11 @@ public class QueryWrapperFilter extends Filter { public DocIdSet getDocIdSet(final LeafReaderContext context, final Bits acceptDocs) throws IOException { // get a private context that is used to rewrite, createWeight and score eventually final LeafReaderContext privateContext = context.reader().getContext(); - final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query); + final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, false); return new DocIdSet() { @Override public DocIdSetIterator iterator() throws IOException { - return weight.scorer(privateContext, acceptDocs, false); + return weight.scorer(privateContext, acceptDocs); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java index e371ecfa272..3a353e5fdea 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TermQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/TermQuery.java @@ -47,9 +47,12 @@ public class TermQuery extends Query { private final Similarity similarity; private final Similarity.SimWeight stats; private final TermContext termStates; - - public TermWeight(IndexSearcher searcher, TermContext termStates) + private final boolean needsScores; + + public TermWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates) throws IOException { + super(TermQuery.this); + this.needsScores = needsScores; assert termStates != null : "TermContext must not be null"; this.termStates = termStates; this.similarity = searcher.getSimilarity(); @@ -63,11 +66,6 @@ public class TermQuery extends Query { return "weight(" + TermQuery.this + ")"; } - @Override - public Query getQuery() { - return TermQuery.this; - } - 
@Override public float getValueForNormalization() { return stats.getValueForNormalization(); @@ -79,7 +77,7 @@ public class TermQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context); final TermsEnum termsEnum = getTermsEnum(context); if (termsEnum == null) { @@ -117,7 +115,7 @@ public class TermQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Scorer scorer = scorer(context, context.reader().getLiveDocs(), true); + Scorer scorer = scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { @@ -170,7 +168,7 @@ public class TermQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { final IndexReaderContext context = searcher.getTopReaderContext(); final TermContext termState; if (perReaderTermState == null @@ -186,7 +184,7 @@ public class TermQuery extends Query { // we must not ignore the given docFreq - if set use the given value (lie) if (docFreq != -1) termState.setDocFreq(docFreq); - return new TermWeight(searcher, termState); + return new TermWeight(searcher, needsScores, termState); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/TopDocs.java b/lucene/core/src/java/org/apache/lucene/search/TopDocs.java index 062718dc5fe..e54c1002642 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopDocs.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopDocs.java @@ -158,7 +158,6 @@ public class TopDocs { // Returns true if first is < second @Override - @SuppressWarnings({"unchecked","rawtypes"}) public boolean lessThan(ShardRef first, ShardRef second) { assert first != second; final FieldDoc firstFD = (FieldDoc) shardHits[first.shardIndex][first.hitIndex]; @@ -195,25 +194,48 @@ public class TopDocs { } /** Returns a new TopDocs, containing topN results across - * the provided TopDocs, sorting by the specified {@link + * the provided TopDocs, sorting by score. Each {@link TopDocs} + * instance must be sorted. + * @lucene.experimental */ + public static TopDocs merge(int topN, TopDocs[] shardHits) throws IOException { + return merge(0, topN, shardHits); + } + + /** + * Same as {@link #merge(int, TopDocs[])} but also ignores the top + * {@code start} top docs. This is typically useful for pagination. + * @lucene.experimental + */ + public static TopDocs merge(int start, int topN, TopDocs[] shardHits) throws IOException { + return mergeAux(null, start, topN, shardHits); + } + + /** Returns a new TopFieldDocs, containing topN results across + * the provided TopFieldDocs, sorting by the specified {@link * Sort}. Each of the TopDocs must have been sorted by * the same Sort, and sort field values must have been * filled (ie, fillFields=true must be - * passed to {@link - * TopFieldCollector#create}. - * - *

Pass sort=null to merge sort by score descending. - * + * passed to {@link TopFieldCollector#create}). * @lucene.experimental */ - public static TopDocs merge(Sort sort, int topN, TopDocs[] shardHits) throws IOException { + public static TopFieldDocs merge(Sort sort, int topN, TopFieldDocs[] shardHits) throws IOException { return merge(sort, 0, topN, shardHits); } /** - * Same as {@link #merge(Sort, int, TopDocs[])} but also slices the result at the same time based - * on the provided start and size. The return TopDocs will always have a scoreDocs with length of at most size. + * Same as {@link #merge(Sort, int, TopFieldDocs[])} but also ignores the top + * {@code start} top docs. This is typically useful for pagination. + * @lucene.experimental */ - public static TopDocs merge(Sort sort, int start, int size, TopDocs[] shardHits) throws IOException { + public static TopFieldDocs merge(Sort sort, int start, int topN, TopFieldDocs[] shardHits) throws IOException { + if (sort == null) { + throw new IllegalArgumentException("sort must be non-null when merging field-docs"); + } + return (TopFieldDocs) mergeAux(sort, start, topN, shardHits); + } + + /** Auxiliary method used by the {@link #merge} impls. A sort value of null + * is used to indicate that docs should be sorted by score. */ + private static TopDocs mergeAux(Sort sort, int start, int size, TopDocs[] shardHits) throws IOException { final PriorityQueue queue; if (sort == null) { queue = new ScoreMergeSortQueue(shardHits); diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java index 0b100ec7483..f153c791e8c 100644 --- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java +++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java @@ -698,4 +698,8 @@ public abstract class TopFieldCollector extends TopDocsCollector { return new TopFieldDocs(totalHits, results, ((FieldValueHitQueue) pq).getFields(), maxScore); } + @Override + public TopFieldDocs topDocs() { + return (TopFieldDocs) super.topDocs(); + } } diff --git a/lucene/core/src/java/org/apache/lucene/search/Weight.java b/lucene/core/src/java/org/apache/lucene/search/Weight.java index 4522287a4ac..8ba2403078a 100644 --- a/lucene/core/src/java/org/apache/lucene/search/Weight.java +++ b/lucene/core/src/java/org/apache/lucene/search/Weight.java @@ -34,14 +34,14 @@ import org.apache.lucene.util.Bits; * {@link org.apache.lucene.index.LeafReader} dependent state should reside in the {@link Scorer}. *

 * Since {@link Weight} creates {@link Scorer} instances for a given
- * {@link org.apache.lucene.index.LeafReaderContext} ({@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits, boolean)})
+ * {@link org.apache.lucene.index.LeafReaderContext} ({@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits)})
 * callers must maintain the relationship between the searcher's top-level
 * {@link IndexReaderContext} and the context used to create a {@link Scorer}.
 * <p>
 * A Weight is used in the following way:
 * <ol>
 * <li>A Weight is constructed by a top-level query, given a
- * IndexSearcher ({@link Query#createWeight(IndexSearcher)}).
+ * IndexSearcher ({@link Query#createWeight(IndexSearcher, boolean)}).
 * <li>The {@link #getValueForNormalization()} method is called on the
 * Weight to compute the query normalization factor
 * {@link Similarity#queryNorm(float)} of the query clauses contained in the
 * query.
 * <li>The query normalization factor is passed to {@link #normalize(float, float)}. At
 * this point the weighting is complete.
 * <li>A Scorer is constructed by
- * {@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits, boolean)}.
+ * {@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits)}.
 * </ol>
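For reference, here is a minimal skeleton (not from the patch) of a custom query following the lifecycle above under the new signatures; "SketchQuery" is a made-up query that matches nothing, and a real implementation would return a Scorer over its postings in step 4:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;

class SketchQuery extends Query {
  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    // Step 1: the Weight is built once per search and now receives needsScores here.
    return new Weight(this) {
      private float boost = getBoost();
      @Override
      public float getValueForNormalization() {                 // step 2
        return boost * boost;
      }
      @Override
      public void normalize(float norm, float topLevelBoost) {  // step 3
        boost *= norm * topLevelBoost;
      }
      @Override
      public Scorer scorer(LeafReaderContext context, Bits acceptDocs) { // step 4: no needsScores flag
        return null;  // null means "no matching documents in this segment"
      }
      @Override
      public Explanation explain(LeafReaderContext context, int doc) {
        return new Explanation(0f, "SketchQuery matches nothing");
      }
    };
  }

  @Override
  public String toString(String field) {
    return "SketchQuery";
  }
}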
* * @since 2.9 */ public abstract class Weight { + protected final Query parentQuery; + + /** Sole constructor, typically invoked by sub-classes. + * @param query the parent query + */ + protected Weight(Query query) { + this.parentQuery = query; + } + /** * An explanation of the score computation for the named document. * @@ -67,7 +76,9 @@ public abstract class Weight { public abstract Explanation explain(LeafReaderContext context, int doc) throws IOException; /** The query that this concerns. */ - public abstract Query getQuery(); + public final Query getQuery() { + return parentQuery; + } /** The value for normalization of contained query clauses (e.g. sum of squared weights). */ public abstract float getValueForNormalization() throws IOException; @@ -87,13 +98,11 @@ public abstract class Weight { * @param acceptDocs * Bits that represent the allowable docs to match (typically deleted docs * but possibly filtering other documents) - * @param needsScores - * True if document scores ({@link Scorer#score}) or match frequencies ({@link Scorer#freq}) are needed. * * @return a {@link Scorer} which scores documents in/out-of order. * @throws IOException if there is a low-level I/O error */ - public abstract Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException; + public abstract Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException; /** * Optional method, to return a {@link BulkScorer} to @@ -108,16 +117,14 @@ public abstract class Weight { * @param acceptDocs * Bits that represent the allowable docs to match (typically deleted docs * but possibly filtering other documents) - * @param needsScores - * True if document scores are needed. * * @return a {@link BulkScorer} which scores documents and * passes them to a collector. * @throws IOException if there is a low-level I/O error */ - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { - Scorer scorer = scorer(context, acceptDocs, needsScores); + Scorer scorer = scorer(context, acceptDocs); if (scorer == null) { // No docs match return null; diff --git a/lucene/core/src/java/org/apache/lucene/search/package.html b/lucene/core/src/java/org/apache/lucene/search/package.html index 6f7d624b0c9..dc358447a77 100644 --- a/lucene/core/src/java/org/apache/lucene/search/package.html +++ b/lucene/core/src/java/org/apache/lucene/search/package.html @@ -389,7 +389,7 @@ on the built-in available scoring models and extending or changing Similarity. {@link org.apache.lucene.search.Query Query} class has several methods that are important for derived classes:
    -
  1. {@link org.apache.lucene.search.Query#createWeight(IndexSearcher) createWeight(IndexSearcher searcher)} — A +
  2. {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)} — A {@link org.apache.lucene.search.Weight Weight} is the internal representation of the Query, so each Query implementation must provide an implementation of Weight. See the subsection on {@link org.apache.lucene.search.Query#rewrite(IndexReader) rewrite(IndexReader reader)} — Rewrites queries into primitive queries. Primitive queries are: {@link org.apache.lucene.search.TermQuery TermQuery}, {@link org.apache.lucene.search.BooleanQuery BooleanQuery}, and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher) createWeight(IndexSearcher searcher)}
  3. + >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)}

@@ -546,8 +546,8 @@ on the built-in available scoring models and extending or changing Similarity.

Assuming we are not sorting (since sorting doesn't affect the raw Lucene score), we call one of the search methods of the IndexSearcher, passing in the {@link org.apache.lucene.search.Weight Weight} object created by - {@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query) - IndexSearcher.createNormalizedWeight(Query)}, + {@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query,boolean) + IndexSearcher.createNormalizedWeight(Query,boolean)}, {@link org.apache.lucene.search.Filter Filter} and the number of results we want. This method returns a {@link org.apache.lucene.search.TopDocs TopDocs} object, which is an internal collection of search results. The IndexSearcher creates diff --git a/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadNearQuery.java b/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadNearQuery.java index be12f19849d..924e6b92200 100644 --- a/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadNearQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadNearQuery.java @@ -71,7 +71,7 @@ public class PayloadNearQuery extends SpanNearQuery { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new PayloadNearSpanWeight(this, searcher); } @@ -148,14 +148,14 @@ public class PayloadNearQuery extends SpanNearQuery { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { return new PayloadNearSpanScorer(query.getSpans(context, acceptDocs, termContexts), this, similarity, similarity.simScorer(stats, context)); } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - PayloadNearSpanScorer scorer = (PayloadNearSpanScorer) scorer(context, context.reader().getLiveDocs(), true); + PayloadNearSpanScorer scorer = (PayloadNearSpanScorer) scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { diff --git a/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadTermQuery.java b/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadTermQuery.java index 0bab87093ab..a9a2655d17e 100644 --- a/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadTermQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/payloads/PayloadTermQuery.java @@ -67,7 +67,7 @@ public class PayloadTermQuery extends SpanTermQuery { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new PayloadTermWeight(this, searcher); } @@ -79,7 +79,7 @@ public class PayloadTermQuery extends SpanTermQuery { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { return new PayloadTermSpanScorer((TermSpans) query.getSpans(context, acceptDocs, termContexts), this, similarity.simScorer(stats, context)); } @@ -176,7 +176,7 @@ public class PayloadTermQuery extends SpanTermQuery { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - 
PayloadTermSpanScorer scorer = (PayloadTermSpanScorer) scorer(context, context.reader().getLiveDocs(), true); + PayloadTermSpanScorer scorer = (PayloadTermSpanScorer) scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java index b2c9cd577e9..ed213063538 100644 --- a/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/spans/FieldMaskingSpanQuery.java @@ -106,8 +106,8 @@ public class FieldMaskingSpanQuery extends SpanQuery { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return maskedQuery.createWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return maskedQuery.createWeight(searcher, needsScores); } @Override diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java index f30c467118b..00bed758488 100644 --- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java +++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanQuery.java @@ -42,7 +42,7 @@ public abstract class SpanQuery extends Query { public abstract String getField(); @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new SpanWeight(this, searcher); } diff --git a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java index c76e062c1eb..c172243e86c 100644 --- a/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java +++ b/lucene/core/src/java/org/apache/lucene/search/spans/SpanWeight.java @@ -41,13 +41,14 @@ import org.apache.lucene.util.Bits; * Expert-only. Public for use by other weight implementations */ public class SpanWeight extends Weight { - protected Similarity similarity; - protected Map termContexts; - protected SpanQuery query; + protected final Similarity similarity; + protected final Map termContexts; + protected final SpanQuery query; protected Similarity.SimWeight stats; public SpanWeight(SpanQuery query, IndexSearcher searcher) throws IOException { + super(query); this.similarity = searcher.getSimilarity(); this.query = query; @@ -71,9 +72,6 @@ public class SpanWeight extends Weight { } } - @Override - public Query getQuery() { return query; } - @Override public float getValueForNormalization() throws IOException { return stats == null ? 
1.0f : stats.getValueForNormalization(); @@ -87,7 +85,7 @@ public class SpanWeight extends Weight { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { if (stats == null) { return null; } else { @@ -97,7 +95,7 @@ public class SpanWeight extends Weight { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - SpanScorer scorer = (SpanScorer) scorer(context, context.reader().getLiveDocs(), true); + SpanScorer scorer = (SpanScorer) scorer(context, context.reader().getLiveDocs()); if (scorer != null) { int newDoc = scorer.advance(doc); if (newDoc == doc) { diff --git a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java index e032f4fbfc2..843c092fa7d 100644 --- a/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java +++ b/lucene/core/src/test/org/apache/lucene/search/JustCompileSearch.java @@ -287,13 +287,12 @@ final class JustCompileSearch { static final class JustCompileWeight extends Weight { - @Override - public Explanation explain(LeafReaderContext context, int doc) { - throw new UnsupportedOperationException(UNSUPPORTED_MSG); + protected JustCompileWeight() { + super(null); } @Override - public Query getQuery() { + public Explanation explain(LeafReaderContext context, int doc) { throw new UnsupportedOperationException(UNSUPPORTED_MSG); } @@ -308,7 +307,7 @@ final class JustCompileSearch { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) { throw new UnsupportedOperationException(UNSUPPORTED_MSG); } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java index 32afad88e7b..593a9702973 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanCoord.java @@ -706,15 +706,15 @@ public class TestBooleanCoord extends LuceneTestCase { /** asserts score for our single matching good doc */ private void assertScore(final float expected, Query query) throws Exception { // test in-order - Weight weight = searcher.createNormalizedWeight(query); - Scorer scorer = weight.scorer(reader.leaves().get(0), null, true); + Weight weight = searcher.createNormalizedWeight(query, true); + Scorer scorer = weight.scorer(reader.leaves().get(0), null); assertTrue(scorer.docID() == -1 || scorer.docID() == DocIdSetIterator.NO_MORE_DOCS); assertEquals(0, scorer.nextDoc()); assertEquals(expected, scorer.score(), 0.0001f); // test bulk scorer final AtomicBoolean seen = new AtomicBoolean(false); - BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), null, true); + BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), null); assertNotNull(bulkScorer); bulkScorer.score(new LeafCollector() { Scorer scorer; diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java index 4f3876b2288..2f7de6fa0cd 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanOr.java @@ -185,10 +185,10 @@ public class TestBooleanOr extends LuceneTestCase { bq.add(new 
TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD); bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD); - Weight w = s.createNormalizedWeight(bq); + Weight w = s.createNormalizedWeight(bq, true); assertEquals(1, s.getIndexReader().leaves().size()); - BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null, true); + BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null); final FixedBitSet hits = new FixedBitSet(docCount); final AtomicInteger end = new AtomicInteger(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java index e6556bd9c0a..be38c142ff0 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQuery.java @@ -231,9 +231,9 @@ public class TestBooleanQuery extends LuceneTestCase { q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD)); } - Weight weight = s.createNormalizedWeight(q); + Weight weight = s.createNormalizedWeight(q, true); - Scorer scorer = weight.scorer(s.leafContexts.get(0), null, true); + Scorer scorer = weight.scorer(s.leafContexts.get(0), null); // First pass: just use .nextDoc() to gather all hits final List hits = new ArrayList<>(); @@ -249,8 +249,8 @@ public class TestBooleanQuery extends LuceneTestCase { // verify exact match: for(int iter2=0;iter2<10;iter2++) { - weight = s.createNormalizedWeight(q); - scorer = weight.scorer(s.leafContexts.get(0), null, true); + weight = s.createNormalizedWeight(q, true); + scorer = weight.scorer(s.leafContexts.get(0), null); if (VERBOSE) { System.out.println(" iter2=" + iter2); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java index 5c7b1ad5303..6a9576d36e8 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanQueryVisitSubscorers.java @@ -282,11 +282,11 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase { static class BooleanQuery2 extends BooleanQuery { @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new BooleanWeight(searcher, false) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new BooleanWeight(searcher, needsScores, false) { @Override - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { - Scorer scorer = scorer(context, acceptDocs, needsScores); + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + Scorer scorer = scorer(context, acceptDocs); if (scorer == null) { return null; } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java index 057507e22c2..0aa716af7a8 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestBooleanScorer.java @@ -70,18 +70,13 @@ public class TestBooleanScorer extends LuceneTestCase { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new Weight() { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) 
throws IOException { + return new Weight(CrazyMustUseBulkScorerQuery.this) { @Override public Explanation explain(LeafReaderContext context, int doc) { throw new UnsupportedOperationException(); } - @Override - public Query getQuery() { - return CrazyMustUseBulkScorerQuery.this; - } - @Override public float getValueForNormalization() { return 1.0f; @@ -92,12 +87,12 @@ public class TestBooleanScorer extends LuceneTestCase { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) { throw new UnsupportedOperationException(); } @Override - public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) { + public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) { return new BulkScorer() { @Override public int score(LeafCollector collector, int min, int max) throws IOException { diff --git a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java index 268a504b5c1..384f99efcf0 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestDisjunctionMaxQuery.java @@ -178,9 +178,9 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { QueryUtils.check(random(), dq, s); assertTrue(s.getTopReaderContext() instanceof LeafReaderContext); - final Weight dw = s.createNormalizedWeight(dq); + final Weight dw = s.createNormalizedWeight(dq, true); LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext(); - final Scorer ds = dw.scorer(context, context.reader().getLiveDocs(), true); + final Scorer ds = dw.scorer(context, context.reader().getLiveDocs()); final boolean skipOk = ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS; if (skipOk) { fail("firsttime skipTo found a match? ... 
" @@ -194,9 +194,9 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase { dq.add(tq("dek", "DOES_NOT_EXIST")); assertTrue(s.getTopReaderContext() instanceof LeafReaderContext); QueryUtils.check(random(), dq, s); - final Weight dw = s.createNormalizedWeight(dq); + final Weight dw = s.createNormalizedWeight(dq, true); LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext(); - final Scorer ds = dw.scorer(context, context.reader().getLiveDocs(), true); + final Scorer ds = dw.scorer(context, context.reader().getLiveDocs()); assertTrue("firsttime skipTo found no match", ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS); assertEquals("found wrong docid", "d4", r.document(ds.docID()).get("id")); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java index 1f44ebd9443..0df733748a8 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMinShouldMatch2.java @@ -125,17 +125,17 @@ public class TestMinShouldMatch2 extends LuceneTestCase { } bq.setMinimumNumberShouldMatch(minShouldMatch); - BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq); + BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq, true); switch (mode) { case DOC_VALUES: return new SlowMinShouldMatchScorer(weight, reader, searcher); case SCORER: - return weight.scorer(reader.getContext(), null, true); + return weight.scorer(reader.getContext(), null); case BULK_SCORER: - final BulkScorer bulkScorer = weight.booleanScorer(reader.getContext(), null, true); + final BulkScorer bulkScorer = weight.booleanScorer(reader.getContext(), null); if (bulkScorer == null) { - if (weight.scorer(reader.getContext(), null, true) != null) { + if (weight.scorer(reader.getContext(), null) != null) { throw new AssertionError("BooleanScorer should be applicable for this query"); } return null; diff --git a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java index dc5a2498424..4e36ad98e55 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java @@ -344,7 +344,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase { MultiPhraseQuery query = new MultiPhraseQuery(); query.add(new Term[] { new Term("body", "this"), new Term("body", "that") }); query.add(new Term("body", "is")); - Weight weight = query.createWeight(searcher); + Weight weight = query.createWeight(searcher, true); assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f); writer.close(); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java index 5ea6ca5ee93..67a64d94694 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestNeedsScores.java @@ -109,19 +109,14 @@ public class TestNeedsScores extends LuceneTestCase { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - final Weight w = in.createWeight(searcher); - return new Weight() { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + final Weight w = in.createWeight(searcher, needsScores); + return new Weight(AssertNeedsScores.this) { 
@Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return w.explain(context, doc); } - @Override - public Query getQuery() { - return AssertNeedsScores.this; - } - @Override public float getValueForNormalization() throws IOException { return w.getValueForNormalization(); @@ -133,9 +128,9 @@ public class TestNeedsScores extends LuceneTestCase { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { assertEquals("query=" + in, value, needsScores); - return w.scorer(context, acceptDocs, needsScores); + return w.scorer(context, acceptDocs); } }; } diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java index 5e5e747b502..3279ab47a2a 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestPositiveScoresOnlyCollector.java @@ -107,7 +107,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase { IndexReader ir = writer.getReader(); writer.close(); IndexSearcher searcher = newSearcher(ir); - Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher); + Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true); Scorer s = new SimpleScorer(fake); TopDocsCollector tdc = TopScoreDocCollector.create(scores.length); Collector c = new PositiveScoresOnlyCollector(tdc); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java b/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java index 1f3f3340e96..0aac3165da9 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestQueryRescorer.java @@ -425,14 +425,9 @@ public class TestQueryRescorer extends LuceneTestCase { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new Weight() { - - @Override - public Query getQuery() { - return FixedScoreQuery.this; - } + return new Weight(FixedScoreQuery.this) { @Override public float getValueForNormalization() { @@ -444,7 +439,7 @@ public class TestQueryRescorer extends LuceneTestCase { } @Override - public Scorer scorer(final LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(final LeafReaderContext context, Bits acceptDocs) throws IOException { return new Scorer(null) { int docID = -1; diff --git a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java index 7b8ad4550d9..449acf00b6f 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestScoreCachingWrappingScorer.java @@ -130,7 +130,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase { IndexReader ir = writer.getReader(); writer.close(); IndexSearcher searcher = newSearcher(ir); - Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher); + Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true); Scorer s = new 
SimpleScorer(fake); ScoreCachingCollector scc = new ScoreCachingCollector(scores.length); scc.setScorer(s); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java index 313fe90c06f..751b3dd0771 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTermScorer.java @@ -75,10 +75,10 @@ public class TestTermScorer extends LuceneTestCase { Term allTerm = new Term(FIELD, "all"); TermQuery termQuery = new TermQuery(allTerm); - Weight weight = indexSearcher.createNormalizedWeight(termQuery); + Weight weight = indexSearcher.createNormalizedWeight(termQuery, true); assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext); LeafReaderContext context = (LeafReaderContext)indexSearcher.getTopReaderContext(); - BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs(), true); + BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs()); // we have 2 documents with the term all in them, one document for all the // other values final List docs = new ArrayList<>(); @@ -137,10 +137,10 @@ public class TestTermScorer extends LuceneTestCase { Term allTerm = new Term(FIELD, "all"); TermQuery termQuery = new TermQuery(allTerm); - Weight weight = indexSearcher.createNormalizedWeight(termQuery); + Weight weight = indexSearcher.createNormalizedWeight(termQuery, true); assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext); LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext(); - Scorer ts = weight.scorer(context, context.reader().getLiveDocs(), true); + Scorer ts = weight.scorer(context, context.reader().getLiveDocs()); assertTrue("next did not return a doc", ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS); assertTrue("score is not correct", ts.score() == 1.6931472f); @@ -156,10 +156,10 @@ public class TestTermScorer extends LuceneTestCase { Term allTerm = new Term(FIELD, "all"); TermQuery termQuery = new TermQuery(allTerm); - Weight weight = indexSearcher.createNormalizedWeight(termQuery); + Weight weight = indexSearcher.createNormalizedWeight(termQuery, true); assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext); LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext(); - Scorer ts = weight.scorer(context, context.reader().getLiveDocs(), true); + Scorer ts = weight.scorer(context, context.reader().getLiveDocs()); assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS); // The next doc should be doc 5 assertTrue("doc should be number 5", ts.docID() == 5); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java index 09677128066..24e784bbafd 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsMerge.java @@ -20,8 +20,6 @@ package org.apache.lucene.search; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatDocValuesField; -import org.apache.lucene.document.FloatField; -import org.apache.lucene.document.IntField; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.index.LeafReaderContext; @@ -56,8 +54,9 @@ public class TestTopDocsMerge 
extends LuceneTestCase { } public TopDocs search(Weight weight, int topN) throws IOException { - return search(ctx, weight, null, topN); - } + TopScoreDocCollector collector = TopScoreDocCollector.create(topN); + search(ctx, weight, collector); + return collector.topDocs(); } @Override public String toString() { @@ -252,9 +251,14 @@ public class TestTopDocsMerge extends LuceneTestCase { } // ... then all shards: - final Weight w = searcher.createNormalizedWeight(query); + final Weight w = searcher.createNormalizedWeight(query, true); - final TopDocs[] shardHits = new TopDocs[subSearchers.length]; + final TopDocs[] shardHits; + if (sort == null) { + shardHits = new TopDocs[subSearchers.length]; + } else { + shardHits = new TopFieldDocs[subSearchers.length]; + } for(int shardIDX=0;shardIDX { @SuppressWarnings({"unchecked","rawtypes"}) final GroupDocs[] mergedGroupDocs = new GroupDocs[numGroups]; - final TopDocs[] shardTopDocs = new TopDocs[shardGroups.length]; + final TopDocs[] shardTopDocs; + if (docSort == null) { + shardTopDocs = new TopDocs[shardGroups.length]; + } else { + shardTopDocs = new TopFieldDocs[shardGroups.length]; + } float totalMaxScore = Float.MIN_VALUE; for(int groupIDX=0;groupIDX { } */ - shardTopDocs[shardIDX] = new TopDocs(shardGroupDocs.totalHits, - shardGroupDocs.scoreDocs, - shardGroupDocs.maxScore); + if (docSort == null) { + shardTopDocs[shardIDX] = new TopDocs(shardGroupDocs.totalHits, + shardGroupDocs.scoreDocs, + shardGroupDocs.maxScore); + } else { + shardTopDocs[shardIDX] = new TopFieldDocs(shardGroupDocs.totalHits, + shardGroupDocs.scoreDocs, + docSort.getSort(), + shardGroupDocs.maxScore); + } maxScore = Math.max(maxScore, shardGroupDocs.maxScore); totalHits += shardGroupDocs.totalHits; scoreSum += shardGroupDocs.score; } - final TopDocs mergedTopDocs = TopDocs.merge(docSort, docOffset + docTopN, shardTopDocs); + final TopDocs mergedTopDocs; + if (docSort == null) { + mergedTopDocs = TopDocs.merge(docOffset + docTopN, shardTopDocs); + } else { + mergedTopDocs = TopDocs.merge(docSort, docOffset + docTopN, (TopFieldDocs[]) shardTopDocs); + } // Slice; final ScoreDoc[] mergedScoreDocs; diff --git a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java index cc9e75adb16..c01b4ec1ac0 100644 --- a/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java +++ b/lucene/grouping/src/test/org/apache/lucene/search/grouping/TestGrouping.java @@ -1175,7 +1175,7 @@ public class TestGrouping extends LuceneTestCase { System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV); } // Run 1st pass collector to get top groups per shard - final Weight w = topSearcher.createNormalizedWeight(query); + final Weight w = topSearcher.createNormalizedWeight(query, true); final List>> shardGroups = new ArrayList<>(); List> firstPassGroupingCollectors = new ArrayList<>(); AbstractFirstPassGroupingCollector firstPassCollector = null; diff --git a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java index bbc79d4d180..0713c0dfa13 100644 --- a/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java +++ b/lucene/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java @@ -602,7 +602,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase 
implements Formatte BooleanQuery booleanQuery = new BooleanQuery(); booleanQuery.add(new ToChildBlockJoinQuery(new TermQuery( - new Term(FIELD_NAME, "parent")), parentFilter, false), Occur.MUST); + new Term(FIELD_NAME, "parent")), parentFilter), Occur.MUST); booleanQuery.add(new TermQuery(new Term(FIELD_NAME, "child")), Occur.MUST); query = booleanQuery; diff --git a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java index e3620bd59df..e8fd8827136 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/TermsIncludingScoreQuery.java @@ -123,9 +123,9 @@ class TermsIncludingScoreQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - final Weight originalWeight = originalQuery.createWeight(searcher); - return new Weight() { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + final Weight originalWeight = originalQuery.createWeight(searcher, needsScores); + return new Weight(TermsIncludingScoreQuery.this) { private TermsEnum segmentTermsEnum; @@ -149,11 +149,6 @@ class TermsIncludingScoreQuery extends Query { return new ComplexExplanation(false, 0.0f, "Not a match"); } - @Override - public Query getQuery() { - return TermsIncludingScoreQuery.this; - } - @Override public float getValueForNormalization() throws IOException { return originalWeight.getValueForNormalization() * TermsIncludingScoreQuery.this.getBoost() * TermsIncludingScoreQuery.this.getBoost(); @@ -165,7 +160,7 @@ class TermsIncludingScoreQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { Terms terms = context.reader().terms(field); if (terms == null) { return null; diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java index 24adabb7193..0a85dbcdb3a 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/ToChildBlockJoinQuery.java @@ -61,34 +61,30 @@ public class ToChildBlockJoinQuery extends Query { // original, so that user does not have to .rewrite() their // query before searching: private final Query origParentQuery; - private final boolean doScores; /** * Create a ToChildBlockJoinQuery. * * @param parentQuery Query that matches parent documents * @param parentsFilter Filter identifying the parent documents. 
- * @param doScores true if parent scores should be calculated */ - public ToChildBlockJoinQuery(Query parentQuery, BitDocIdSetFilter parentsFilter, boolean doScores) { + public ToChildBlockJoinQuery(Query parentQuery, BitDocIdSetFilter parentsFilter) { super(); this.origParentQuery = parentQuery; this.parentQuery = parentQuery; this.parentsFilter = parentsFilter; - this.doScores = doScores; } - private ToChildBlockJoinQuery(Query origParentQuery, Query parentQuery, BitDocIdSetFilter parentsFilter, boolean doScores) { + private ToChildBlockJoinQuery(Query origParentQuery, Query parentQuery, BitDocIdSetFilter parentsFilter) { super(); this.origParentQuery = origParentQuery; this.parentQuery = parentQuery; this.parentsFilter = parentsFilter; - this.doScores = doScores; } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher), parentsFilter, doScores); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores), parentsFilter, needsScores); } /** Return our parent query. */ @@ -103,18 +99,13 @@ public class ToChildBlockJoinQuery extends Query { private final boolean doScores; public ToChildBlockJoinWeight(Query joinQuery, Weight parentWeight, BitDocIdSetFilter parentsFilter, boolean doScores) { - super(); + super(joinQuery); this.joinQuery = joinQuery; this.parentWeight = parentWeight; this.parentsFilter = parentsFilter; this.doScores = doScores; } - @Override - public Query getQuery() { - return joinQuery; - } - @Override public float getValueForNormalization() throws IOException { return parentWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost(); @@ -128,9 +119,9 @@ public class ToChildBlockJoinQuery extends Query { // NOTE: acceptDocs applies (and is checked) only in the // child document space @Override - public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs) throws IOException { - final Scorer parentScorer = parentWeight.scorer(readerContext, null, needsScores); + final Scorer parentScorer = parentWeight.scorer(readerContext, null); if (parentScorer == null) { // No matches @@ -354,8 +345,7 @@ public class ToChildBlockJoinQuery extends Query { if (parentRewrite != parentQuery) { Query rewritten = new ToChildBlockJoinQuery(parentQuery, parentRewrite, - parentsFilter, - doScores); + parentsFilter); rewritten.setBoost(getBoost()); return rewritten; } else { @@ -374,7 +364,6 @@ public class ToChildBlockJoinQuery extends Query { final ToChildBlockJoinQuery other = (ToChildBlockJoinQuery) _other; return origParentQuery.equals(other.origParentQuery) && parentsFilter.equals(other.parentsFilter) && - doScores == other.doScores && super.equals(other); } else { return false; @@ -386,7 +375,6 @@ public class ToChildBlockJoinQuery extends Query { final int prime = 31; int hash = super.hashCode(); hash = prime * hash + origParentQuery.hashCode(); - hash = prime * hash + new Boolean(doScores).hashCode(); hash = prime * hash + parentsFilter.hashCode(); return hash; } @@ -394,7 +382,6 @@ public class ToChildBlockJoinQuery extends Query { @Override public ToChildBlockJoinQuery clone() { return new ToChildBlockJoinQuery(origParentQuery.clone(), - parentsFilter, - doScores); + parentsFilter); } } diff --git 
a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java index ca2ef13a961..3f785e9c7e9 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinIndexSearcher.java @@ -56,7 +56,7 @@ public class ToParentBlockJoinIndexSearcher extends IndexSearcher { // we force the use of Scorer (not BulkScorer) to make sure // that the scorer passed to LeafCollector.setScorer supports // Scorer.getChildren - Scorer scorer = weight.scorer(ctx, ctx.reader().getLiveDocs(), true); + Scorer scorer = weight.scorer(ctx, ctx.reader().getLiveDocs()); if (scorer != null) { final LeafCollector leafCollector = collector.getLeafCollector(ctx); leafCollector.setScorer(scorer); diff --git a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java index 4a157dba92d..324d5ed2a91 100644 --- a/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java +++ b/lucene/join/src/java/org/apache/lucene/search/join/ToParentBlockJoinQuery.java @@ -120,8 +120,8 @@ public class ToParentBlockJoinQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores), parentsFilter, scoreMode); } /** Return our child query. */ @@ -136,18 +136,13 @@ public class ToParentBlockJoinQuery extends Query { private final ScoreMode scoreMode; public BlockJoinWeight(Query joinQuery, Weight childWeight, BitDocIdSetFilter parentsFilter, ScoreMode scoreMode) { - super(); + super(joinQuery); this.joinQuery = joinQuery; this.childWeight = childWeight; this.parentsFilter = parentsFilter; this.scoreMode = scoreMode; } - @Override - public Query getQuery() { - return joinQuery; - } - @Override public float getValueForNormalization() throws IOException { return childWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost(); @@ -161,9 +156,9 @@ public class ToParentBlockJoinQuery extends Query { // NOTE: acceptDocs applies (and is checked) only in the // parent document space @Override - public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs) throws IOException { - final Scorer childScorer = childWeight.scorer(readerContext, readerContext.reader().getLiveDocs(), needsScores); + final Scorer childScorer = childWeight.scorer(readerContext, readerContext.reader().getLiveDocs()); if (childScorer == null) { // No matches return null; @@ -189,7 +184,7 @@ public class ToParentBlockJoinQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, context.reader().getLiveDocs(), true); + BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, context.reader().getLiveDocs()); if (scorer != null && scorer.advance(doc) == doc) { return scorer.explain(context.docBase); } diff --git 
a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java index e05628b0a79..0a3dcfb9084 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoin.java @@ -231,7 +231,7 @@ public class TestBlockJoin extends LuceneTestCase { //System.out.println("TEST: now test up"); // Now join "up" (map parent hits to child docs) instead...: - ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter, random().nextBoolean()); + ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter); BooleanQuery fullChildQuery = new BooleanQuery(); fullChildQuery.add(new BooleanClause(parentJoinQuery, Occur.MUST)); fullChildQuery.add(new BooleanClause(childQuery, Occur.MUST)); @@ -375,15 +375,15 @@ public class TestBlockJoin extends LuceneTestCase { TermQuery us = new TermQuery(new Term("country", "United States")); assertEquals("@ US we have java and ruby", 2, s.search(new ToChildBlockJoinQuery(us, - parentsFilter, random().nextBoolean()), 10).totalHits ); + parentsFilter), 10).totalHits ); - assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter, random().nextBoolean()), + assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter), skill("java"), 10).totalHits ); BooleanQuery rubyPython = new BooleanQuery(); rubyPython.add(new TermQuery(new Term("skill", "ruby")), Occur.SHOULD); rubyPython.add(new TermQuery(new Term("skill", "python")), Occur.SHOULD); - assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter, random().nextBoolean()), + assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter), new QueryWrapperFilter(rubyPython), 10).totalHits ); r.close(); @@ -919,7 +919,7 @@ public class TestBlockJoin extends LuceneTestCase { } // Maps parent query to child docs: - final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter, random().nextBoolean()); + final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter); // To run against the block-join index: final Query childJoinQuery2; @@ -1188,8 +1188,8 @@ public class TestBlockJoin extends LuceneTestCase { new TermQuery(new Term("parent", "1")))); ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg); - Weight weight = s.createNormalizedWeight(q); - DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null, true); + Weight weight = s.createNormalizedWeight(q, true); + DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null); assertEquals(1, disi.advance(1)); r.close(); dir.close(); @@ -1222,8 +1222,8 @@ public class TestBlockJoin extends LuceneTestCase { new TermQuery(new Term("isparent", "yes")))); ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg); - Weight weight = s.createNormalizedWeight(q); - DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null, true); + Weight weight = s.createNormalizedWeight(q, true); + DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null); assertEquals(2, disi.advance(0)); r.close(); dir.close(); @@ -1548,7 +1548,7 @@ public class TestBlockJoin extends LuceneTestCase { Query parentQuery = new TermQuery(new Term("parent", 
"2")); - ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter, random().nextBoolean()); + ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter); TopDocs topdocs = s.search(parentJoinQuery, 3); assertEquals(1, topdocs.totalHits); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java index 596f3abe6ff..8d35c41443b 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinValidation.java @@ -107,7 +107,7 @@ public class TestBlockJoinValidation extends LuceneTestCase { public void testNextDocValidationForToChildBjq() throws Exception { Query parentQueryWithRandomChild = createParentsQueryWithOneChild(getRandomChildNumber(0)); - ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false); + ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter); thrown.expect(IllegalStateException.class); thrown.expectMessage(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE); indexSearcher.search(blockJoinQuery, 1); @@ -117,7 +117,7 @@ public class TestBlockJoinValidation extends LuceneTestCase { public void testValidationForToChildBjqWithChildFilterQuery() throws Exception { Query parentQueryWithRandomChild = createParentQuery(); - ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false); + ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter); Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("common_field", "1"))); thrown.expect(IllegalStateException.class); thrown.expectMessage(ToChildBlockJoinQuery.ILLEGAL_ADVANCE_ON_PARENT); @@ -131,7 +131,7 @@ public class TestBlockJoinValidation extends LuceneTestCase { // in BJQ must be greater than child number in Boolean clause int nextRandomChildNumber = getRandomChildNumber(randomChildNumber); Query parentQueryWithRandomChild = createParentsQueryWithOneChild(nextRandomChildNumber); - ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false); + ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter); // advance() method is used by ConjunctionScorer, so we need to create Boolean conjunction query BooleanQuery conjunctionQuery = new BooleanQuery(); WildcardQuery childQuery = new WildcardQuery(new Term("child", createFieldValue(randomChildNumber))); diff --git a/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java index bff42a6da48..3b931992dd4 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/BoostingQuery.java @@ -54,8 +54,8 @@ public class BoostingQuery extends Query { public Query rewrite(IndexReader reader) throws IOException { BooleanQuery result = new BooleanQuery() { @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new BooleanWeight(searcher, false) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new BooleanWeight(searcher, needsScores, false) { @Override 
public float coord(int overlap, int max) { diff --git a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java index 78683be2e30..ddf60cd5681 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/CustomScoreQuery.java @@ -187,21 +187,16 @@ public class CustomScoreQuery extends Query { boolean qStrict; float queryWeight; - public CustomWeight(IndexSearcher searcher) throws IOException { - this.subQueryWeight = subQuery.createWeight(searcher); + public CustomWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + super(CustomScoreQuery.this); + this.subQueryWeight = subQuery.createWeight(searcher, needsScores); this.valSrcWeights = new Weight[scoringQueries.length]; for(int i = 0; i < scoringQueries.length; i++) { - this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher); + this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores); } this.qStrict = strict; } - /*(non-Javadoc) @see org.apache.lucene.search.Weight#getQuery() */ - @Override - public Query getQuery() { - return CustomScoreQuery.this; - } - @Override public float getValueForNormalization() throws IOException { float sum = subQueryWeight.getValueForNormalization(); @@ -235,14 +230,14 @@ public class CustomScoreQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context, acceptDocs, needsScores); + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + Scorer subQueryScorer = subQueryWeight.scorer(context, acceptDocs); if (subQueryScorer == null) { return null; } Scorer[] valSrcScorers = new Scorer[valSrcWeights.length]; for(int i = 0; i < valSrcScorers.length; i++) { - valSrcScorers[i] = valSrcWeights[i].scorer(context, acceptDocs, needsScores); + valSrcScorers[i] = valSrcWeights[i].scorer(context, acceptDocs); } return new CustomScorer(CustomScoreQuery.this.getCustomScoreProvider(context), this, queryWeight, subQueryScorer, valSrcScorers); } @@ -373,8 +368,8 @@ public class CustomScoreQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new CustomWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new CustomWeight(searcher, needsScores); } /** diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java index 2c76b00fa6f..82c9fceda4c 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/function/BoostedQuery.java @@ -68,8 +68,8 @@ public class BoostedQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return new BoostedQuery.BoostedWeight(searcher); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return new BoostedQuery.BoostedWeight(searcher, needsScores); } private class BoostedWeight extends Weight { @@ -77,18 +77,14 @@ public class BoostedQuery extends Query { Weight qWeight; Map fcontext; - public BoostedWeight(IndexSearcher searcher) throws IOException { + public BoostedWeight(IndexSearcher searcher, 
boolean needsScores) throws IOException { + super(BoostedQuery.this); this.searcher = searcher; - this.qWeight = q.createWeight(searcher); + this.qWeight = q.createWeight(searcher, needsScores); this.fcontext = ValueSource.newContext(searcher); boostVal.createWeight(fcontext,searcher); } - @Override - public Query getQuery() { - return BoostedQuery.this; - } - @Override public float getValueForNormalization() throws IOException { float sum = qWeight.getValueForNormalization(); @@ -103,8 +99,8 @@ public class BoostedQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { - Scorer subQueryScorer = qWeight.scorer(context, acceptDocs, needsScores); + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + Scorer subQueryScorer = qWeight.scorer(context, acceptDocs); if (subQueryScorer == null) { return null; } diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java index d5e647ef62d..b338452f99b 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/function/FunctionQuery.java @@ -72,16 +72,12 @@ public class FunctionQuery extends Query { protected final Map context; public FunctionWeight(IndexSearcher searcher) throws IOException { + super(FunctionQuery.this); this.searcher = searcher; this.context = ValueSource.newContext(searcher); func.createWeight(context, searcher); } - @Override - public Query getQuery() { - return FunctionQuery.this; - } - @Override public float getValueForNormalization() throws IOException { queryWeight = getBoost(); @@ -95,13 +91,13 @@ public class FunctionQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { return new AllScorer(context, acceptDocs, this, queryWeight); } @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return ((AllScorer)scorer(context, context.reader().getLiveDocs(), true)).explain(doc); + return ((AllScorer)scorer(context, context.reader().getLiveDocs())).explain(doc); } } @@ -208,7 +204,7 @@ public class FunctionQuery extends Query { @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { return new FunctionQuery.FunctionWeight(searcher); } diff --git a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java index dab719471d3..f3ceae3176f 100644 --- a/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java +++ b/lucene/queries/src/java/org/apache/lucene/queries/function/valuesource/QueryValueSource.java @@ -72,7 +72,7 @@ public class QueryValueSource extends ValueSource { @Override public void createWeight(Map context, IndexSearcher searcher) throws IOException { - Weight w = searcher.createNormalizedWeight(q); + Weight w = searcher.createNormalizedWeight(q, true); context.put(this, w); } } @@ -126,7 +126,7 @@ class QueryDocValues extends FloatDocValues { try { if (doc < lastDocRequested) { if (noMatches) return defVal; - 
scorer = weight.scorer(readerContext, acceptDocs, true); + scorer = weight.scorer(readerContext, acceptDocs); if (scorer==null) { noMatches = true; return defVal; @@ -157,7 +157,7 @@ class QueryDocValues extends FloatDocValues { try { if (doc < lastDocRequested) { if (noMatches) return false; - scorer = weight.scorer(readerContext, acceptDocs, true); + scorer = weight.scorer(readerContext, acceptDocs); scorerDoc = -1; if (scorer==null) { noMatches = true; @@ -215,7 +215,7 @@ class QueryDocValues extends FloatDocValues { mval.exists = false; return; } - scorer = weight.scorer(readerContext, acceptDocs, true); + scorer = weight.scorer(readerContext, acceptDocs); scorerDoc = -1; if (scorer==null) { noMatches = true; diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java index 428fe32178a..14019713e15 100644 --- a/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java +++ b/lucene/sandbox/src/java/org/apache/lucene/search/TermAutomatonQuery.java @@ -188,7 +188,7 @@ public class TermAutomatonQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { IndexReaderContext context = searcher.getTopReaderContext(); Map termStates = new HashMap<>(); @@ -347,6 +347,7 @@ public class TermAutomatonQuery extends Query { private final Similarity similarity; public TermAutomatonWeight(Automaton automaton, IndexSearcher searcher, Map termStates) throws IOException { + super(TermAutomatonQuery.this); this.automaton = automaton; this.searcher = searcher; this.termStates = termStates; @@ -369,11 +370,6 @@ public class TermAutomatonQuery extends Query { return "weight(" + TermAutomatonQuery.this + ")"; } - @Override - public Query getQuery() { - return TermAutomatonQuery.this; - } - @Override public float getValueForNormalization() { return stats.getValueForNormalization(); @@ -385,7 +381,7 @@ public class TermAutomatonQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { // Initialize the enums; null for a given slot means that term didn't appear in this reader EnumAndScorer[] enums = new EnumAndScorer[idToTerm.size()]; diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowCollationMethods.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowCollationMethods.java index fe932ae83a5..3b505c86249 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowCollationMethods.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/queries/TestSlowCollationMethods.java @@ -99,13 +99,13 @@ public class TestSlowCollationMethods extends LuceneTestCase { }); final Sort sort = new Sort(sf); - final TopDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random().nextInt(4)), sort); + final TopFieldDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random().nextInt(4)), sort); doCheckSorting(docs1); - final TopDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random().nextInt(4)), sort); + final TopFieldDocs docs2 = 
searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random().nextInt(4)), sort); doCheckSorting(docs2); - final TopDocs docs = TopDocs.merge(sort, numDocs/(1+random().nextInt(4)), new TopDocs[]{docs1, docs2}); + final TopFieldDocs docs = TopDocs.merge(sort, numDocs/(1+random().nextInt(4)), new TopFieldDocs[]{docs1, docs2}); doCheckSorting(docs); } diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java index eea5d949e96..1cced2508dc 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingIndexSearcher.java @@ -56,8 +56,8 @@ public class AssertingIndexSearcher extends IndexSearcher { /** Ensures, that the returned {@code Weight} is not normalized again, which may produce wrong scores. */ @Override - public Weight createNormalizedWeight(Query query) throws IOException { - final Weight w = super.createNormalizedWeight(query); + public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { + final Weight w = super.createNormalizedWeight(query, needsScores); return new AssertingWeight(random, w) { @Override @@ -66,8 +66,8 @@ public class AssertingIndexSearcher extends IndexSearcher { } @Override - public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException { - Scorer scorer = w.scorer(context, acceptDocs, needsScores); + public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException { + Scorer scorer = w.scorer(context, acceptDocs); if (scorer != null) { // check that scorer obeys disi contract for docID() before next()/advance try { diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java index 3aa87b7368b..ae2f9a32602 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingQuery.java @@ -42,8 +42,8 @@ public class AssertingQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher) throws IOException { - return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher)); + public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher, needsScores)); } @Override diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java index 118d259998b..1b801fe1ad1 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java +++ b/lucene/test-framework/src/java/org/apache/lucene/search/AssertingWeight.java @@ -33,6 +33,7 @@ class AssertingWeight extends Weight { final Weight in; AssertingWeight(Random random, Weight in) { + super(in.getQuery()); this.random = random; this.in = in; } @@ -42,11 +43,6 @@ class AssertingWeight extends Weight { return in.explain(context, doc); } - @Override - public Query getQuery() { - return in.getQuery(); - } - @Override public float getValueForNormalization() throws IOException { return in.getValueForNormalization(); @@ -58,15 +54,15 @@ class AssertingWeight extends Weight { } @Override - 
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
index ae172924ea9..080353752b8 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/QueryUtils.java
@@ -261,9 +261,9 @@ public class QueryUtils {
           lastDoc[0] = doc;
           try {
             if (scorer == null) {
-              Weight w = s.createNormalizedWeight(q);
+              Weight w = s.createNormalizedWeight(q, true);
               LeafReaderContext context = readerContextArray.get(leafPtr);
-              scorer = w.scorer(context, context.reader().getLiveDocs(), true);
+              scorer = w.scorer(context, context.reader().getLiveDocs());
             }
             int op = order[(opidx[0]++) % order.length];
@@ -313,9 +313,9 @@ public class QueryUtils {
             final LeafReader previousReader = lastReader[0];
             IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
             indexSearcher.setSimilarity(s.getSimilarity());
-            Weight w = indexSearcher.createNormalizedWeight(q);
+            Weight w = indexSearcher.createNormalizedWeight(q, true);
             LeafReaderContext ctx = (LeafReaderContext)indexSearcher.getTopReaderContext();
-            Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs(), true);
+            Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
             if (scorer != null) {
               boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
               Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -335,9 +335,9 @@ public class QueryUtils {
         final LeafReader previousReader = lastReader[0];
         IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
         indexSearcher.setSimilarity(s.getSimilarity());
-        Weight w = indexSearcher.createNormalizedWeight(q);
+        Weight w = indexSearcher.createNormalizedWeight(q, true);
         LeafReaderContext ctx = previousReader.getContext();
-        Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs(), true);
+        Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
         if (scorer != null) {
           boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
           Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -367,8 +367,8 @@ public class QueryUtils {
             try {
               long startMS = System.currentTimeMillis();
               for (int i=lastDoc[0]+1; i<=doc; i++) {
-                Weight w = s.createNormalizedWeight(q);
-                Scorer scorer = w.scorer(context.get(leafPtr), liveDocs, true);
+                Weight w = s.createNormalizedWeight(q, true);
+                Scorer scorer = w.scorer(context.get(leafPtr), liveDocs);
                 Assert.assertTrue("query collected "+doc+" but skipTo("+i+") says no more docs!",scorer.advance(i) != DocIdSetIterator.NO_MORE_DOCS);
                 Assert.assertEquals("query collected "+doc+" but skipTo("+i+") got to "+scorer.docID(),doc,scorer.docID());
                 float skipToScore = scorer.score();
@@ -400,8 +400,8 @@
           final LeafReader previousReader = lastReader[0];
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
           indexSearcher.setSimilarity(s.getSimilarity());
-          Weight w = indexSearcher.createNormalizedWeight(q);
-          Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs(), true);
+          Weight w = indexSearcher.createNormalizedWeight(q, true);
+          Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
           if (scorer != null) {
             boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
             Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -421,8 +421,8 @@
           final LeafReader previousReader = lastReader[0];
           IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
           indexSearcher.setSimilarity(s.getSimilarity());
-          Weight w = indexSearcher.createNormalizedWeight(q);
-          Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs(), true);
+          Weight w = indexSearcher.createNormalizedWeight(q, true);
+          Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
           if (scorer != null) {
             boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
             Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -432,10 +432,10 @@
   /** Check that the scorer and bulk scorer advance consistently. */
   public static void checkBulkScorerSkipTo(Random r, Query query, IndexSearcher searcher) throws IOException {
-    Weight weight = searcher.createNormalizedWeight(query);
+    Weight weight = searcher.createNormalizedWeight(query, true);
     for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
-      final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs(), true);
-      final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs(), true);
+      final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
+      final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs());
       if (scorer == null && bulkScorer == null) {
         continue;
       }
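The QueryUtils hunks above show the caller-side consequence of this change: whether scores are needed is now declared once, when the Weight is created, rather than each time a per-segment scorer is pulled. A self-contained sketch of that flow; the class and method names are hypothetical and not part of the patch:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

// Hypothetical caller: needsScores is passed to createNormalizedWeight, and the
// per-segment scorer call no longer carries the flag.
final class ScoreAllDocsSketch {
  static float sumScores(IndexSearcher searcher, Query query) throws IOException {
    float total = 0f;
    Weight weight = searcher.createNormalizedWeight(query, true); // true = scores are needed
    for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
      Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
      if (scorer == null) {
        continue; // the query matches nothing in this segment
      }
      while (scorer.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
        total += scorer.score();
      }
    }
    return total;
  }
}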
diff --git a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
index 9bb3cd84f4c..89532f283f5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/search/ShardSearchingTestBase.java
@@ -360,7 +360,7 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
       }
       // Merge:
-      return TopDocs.merge(null, numHits, shardHits);
+      return TopDocs.merge(numHits, shardHits);
     }
     public TopDocs localSearch(Query query, int numHits) throws IOException {
@@ -369,6 +369,9 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
     @Override
     public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException {
+      if (after == null) {
+        return super.searchAfter(after, query, numHits);
+      }
       final TopDocs[] shardHits = new TopDocs[nodeVersions.length];
       // results are merged in that order: score, shardIndex, doc. therefore we set
       // after to after.score and depending on the nodeID we set doc to either:
@@ -412,7 +415,7 @@
       }
       // Merge:
-      return TopDocs.merge(null, numHits, shardHits);
+      return TopDocs.merge(numHits, shardHits);
     }
     public TopDocs localSearchAfter(ScoreDoc after, Query query, int numHits) throws IOException {
@@ -422,14 +425,14 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
     @Override
    public TopFieldDocs search(Query query, int numHits, Sort sort) throws IOException {
       assert sort != null;
-      final TopDocs[] shardHits = new TopDocs[nodeVersions.length];
+      final TopFieldDocs[] shardHits = new TopFieldDocs[nodeVersions.length];
       for(int nodeID=0;nodeID 0) { DocIterator i = children.iterator();
diff --git a/solr/core/src/java/org/apache/solr/schema/LatLonType.java b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
index a0dd1990c5f..f9521fbed7a 100644
--- a/solr/core/src/java/org/apache/solr/schema/LatLonType.java
+++ b/solr/core/src/java/org/apache/solr/schema/LatLonType.java
@@ -319,6 +319,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
     protected Map lonContext;
     public SpatialWeight(IndexSearcher searcher) throws IOException {
+      super(SpatialDistanceQuery.this);
       this.searcher = searcher;
       this.latContext = ValueSource.newContext(searcher);
       this.lonContext = ValueSource.newContext(searcher);
@@ -326,11 +327,6 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
       lonSource.createWeight(lonContext, searcher);
     }
-    @Override
-    public Query getQuery() {
-      return SpatialDistanceQuery.this;
-    }
-
     @Override
     public float getValueForNormalization() throws IOException {
       queryWeight = getBoost();
@@ -344,13 +340,13 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
     }
     @Override
-    public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
+    public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
       return new SpatialScorer(context, acceptDocs, this, queryWeight);
     }
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      return ((SpatialScorer)scorer(context, context.reader().getLiveDocs(), true)).explain(doc);
+      return ((SpatialScorer)scorer(context, context.reader().getLiveDocs())).explain(doc);
     }
   }
@@ -567,7 +563,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
   @Override
-  public Weight createWeight(IndexSearcher searcher) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
     // if we were supposed to use bboxQuery, then we should have been rewritten using that query
     assert bboxQuery == null;
     return new SpatialWeight(searcher);
diff --git a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
index a94e6a9e18e..ad0063c5766 100644
--- a/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ExportQParserPlugin.java
@@ -80,7 +80,7 @@ public class ExportQParserPlugin extends QParserPlugin {
   }
   public Weight createWeight(IndexSearcher searcher) throws IOException {
-    return mainQuery.createWeight(searcher);
+    return mainQuery.createWeight(searcher, true);
   }
   public Query rewrite(IndexReader reader) throws IOException {
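Query wrappers in this patch follow one of two patterns for the new createWeight(IndexSearcher, boolean) signature: forward the caller's needsScores unchanged (as WrappedQuery does further down) or pin it, as the export plugin above does with true. A hypothetical pass-through query, written as a sketch and not taken from the patch:

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;

// Hypothetical delegating query: forwards needsScores to the wrapped query.
class DelegatingQuerySketch extends Query {
  private final Query in;

  DelegatingQuerySketch(Query in) {
    this.in = in;
  }

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    return in.createWeight(searcher, needsScores); // or pass a fixed value when scoring is always (un)needed
  }

  @Override
  public Query rewrite(IndexReader reader) throws IOException {
    Query rewritten = in.rewrite(reader);
    return rewritten == in ? this : new DelegatingQuerySketch(rewritten);
  }

  @Override
  public String toString(String field) {
    return "delegating(" + in.toString(field) + ")";
  }
}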
diff --git a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
index dc10acf2e3e..544cf137ebc 100644
--- a/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/JoinQParserPlugin.java
@@ -210,7 +210,7 @@ class JoinQuery extends Query {
   }
   @Override
-  public Weight createWeight(IndexSearcher searcher) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
     return new JoinQueryWeight((SolrIndexSearcher)searcher);
   }
@@ -224,6 +224,7 @@ class JoinQuery extends Query {
     ResponseBuilder rb;
     public JoinQueryWeight(SolrIndexSearcher searcher) {
+      super(JoinQuery.this);
       this.fromSearcher = searcher;
       SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
       if (info != null) {
@@ -280,11 +281,6 @@ class JoinQuery extends Query {
       this.toSearcher = searcher;
     }
-    @Override
-    public Query getQuery() {
-      return JoinQuery.this;
-    }
-
     @Override
     public float getValueForNormalization() throws IOException {
       queryWeight = getBoost();
@@ -303,7 +299,7 @@ class JoinQuery extends Query {
     @Override
-    public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
+    public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
       if (filter == null) {
         boolean debug = rb != null && rb.isDebug();
         long start = debug ? System.currentTimeMillis() : 0;
@@ -572,7 +568,7 @@ class JoinQuery extends Query {
     @Override
     public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      Scorer scorer = scorer(context, context.reader().getLiveDocs(), true);
+      Scorer scorer = scorer(context, context.reader().getLiveDocs());
       boolean exists = scorer.advance(doc) == doc;
       ComplexExplanation result = new ComplexExplanation();
diff --git a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
index 6c332d13678..8438aacaca4 100644
--- a/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/ReRankQParserPlugin.java
@@ -171,8 +171,8 @@ public class ReRankQParserPlugin extends QParserPlugin {
   }
-  public Weight createWeight(IndexSearcher searcher) throws IOException{
-    return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException{
+    return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, needsScores);
   }
  }
@@ -182,23 +182,20 @@ public class ReRankQParserPlugin extends QParserPlugin {
    private Weight mainWeight;
    private double reRankWeight;
-   public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher) throws IOException {
+   public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores) throws IOException {
+     super(mainQuery);
     this.reRankQuery = reRankQuery;
     this.searcher = searcher;
     this.reRankWeight = reRankWeight;
-    this.mainWeight = mainQuery.createWeight(searcher);
+    this.mainWeight = mainQuery.createWeight(searcher, needsScores);
   }
   public float getValueForNormalization() throws IOException {
     return mainWeight.getValueForNormalization();
   }
-  public Scorer scorer(LeafReaderContext context, Bits bits, boolean needsScores) throws IOException {
-    return mainWeight.scorer(context, bits, needsScores);
-  }
-
-  public Query getQuery() {
-    return mainWeight.getQuery();
+  public Scorer scorer(LeafReaderContext context, Bits bits) throws IOException {
+    return mainWeight.scorer(context, bits);
   }
   public void normalize(float norm, float topLevelBoost) {
diff --git a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
index 6039a9baeae..770d56f6291 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrConstantScoreQuery.java
@@ -106,16 +106,12 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
     private Map context;
     public ConstantWeight(IndexSearcher searcher) throws IOException {
+      super(SolrConstantScoreQuery.this);
       this.context = ValueSource.newContext(searcher);
       if (filter instanceof SolrFilter)
         ((SolrFilter)filter).createWeight(context, searcher);
     }
-    @Override
-    public Query getQuery() {
-      return SolrConstantScoreQuery.this;
-    }
-
     @Override
     public float getValueForNormalization() throws IOException {
       queryWeight = getBoost();
@@ -129,7 +125,7 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
     }
     @Override
-    public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
+    public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
      return new ConstantScorer(context, this, queryWeight, acceptDocs);
     }
@@ -233,7 +229,7 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
   }
   @Override
-  public Weight createWeight(IndexSearcher searcher) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
     try {
       return new SolrConstantScoreQuery.ConstantWeight(searcher);
     } catch (IOException e) {
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index cc3771f5944..ececb2ca658 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -1113,7 +1113,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
         List weights = new ArrayList<>(notCached.size());
         for (Query q : notCached) {
           Query qq = QueryUtils.makeQueryable(q);
-          weights.add(createNormalizedWeight(qq));
+          weights.add(createNormalizedWeight(qq, true));
         }
         pf.filter = new FilterImpl(answer, weights);
       } else {
@@ -2474,7 +2474,7 @@ class FilterImpl extends Filter {
         iterators.add(iter);
       }
       for (Weight w : weights) {
-        Scorer scorer = w.scorer(context, context.reader().getLiveDocs(), true);
+        Scorer scorer = w.scorer(context, context.reader().getLiveDocs());
         if (scorer == null) return null;
         iterators.add(scorer);
       }
diff --git a/solr/core/src/java/org/apache/solr/search/WrappedQuery.java b/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
index 462b25840fd..974e4d00f7a 100644
--- a/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/WrappedQuery.java
@@ -53,8 +53,8 @@ public class WrappedQuery extends ExtendedQueryBase {
   }
   @Override
-  public Weight createWeight(IndexSearcher searcher) throws IOException {
-    return q.createWeight(searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    return q.createWeight(searcher, needsScores);
   }
   @Override
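The SolrIndexSearcher filter path above keeps needsScores=true, but the new flag also lets a caller that only needs matching signal that scores can be skipped. A hypothetical, self-contained sketch (names not from the patch) that collects matching doc ids for one segment with needsScores=false:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.FixedBitSet;

// Hypothetical filter-style consumer: the Scorer is used purely as a doc-id iterator,
// so the Weight is created with needsScores=false.
final class MatchingDocsSketch {
  static FixedBitSet collectMatches(IndexSearcher searcher, Query query, LeafReaderContext context)
      throws IOException {
    FixedBitSet bits = new FixedBitSet(context.reader().maxDoc());
    Weight weight = searcher.createNormalizedWeight(query, false); // false = scores not needed
    Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
    if (scorer != null) {
      int doc;
      while ((doc = scorer.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
        bits.set(doc); // record the match; scorer.score() is never called
      }
    }
    return bits;
  }
}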
diff --git a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
index 63fbeb6ceec..7c564b92838 100644
--- a/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
@@ -20,6 +20,7 @@ package org.apache.solr.search.grouping.distributed.responseprocessor;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopFieldDocs;
 import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.TopGroups;
 import org.apache.lucene.util.BytesRef;
@@ -171,7 +172,12 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
       }
       int topN = rb.getGroupingSpec().getOffset() + rb.getGroupingSpec().getLimit();
-      TopDocs mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopDocs[topDocs.size()]));
+      final TopDocs mergedTopDocs;
+      if (sortWithinGroup == null) {
+        mergedTopDocs = TopDocs.merge(topN, topDocs.toArray(new TopDocs[topDocs.size()]));
+      } else {
+        mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
+      }
       rb.mergedQueryCommandResults.put(query, new QueryCommandResult(mergedTopDocs, mergedMatches));
     }
diff --git a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
index d74650c9ecd..62fbe131824 100644
--- a/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
+++ b/solr/core/src/java/org/apache/solr/search/join/BlockJoinChildQParser.java
@@ -29,7 +29,7 @@ public class BlockJoinChildQParser extends BlockJoinParentQParser {
   }
   protected Query createQuery(Query parentListQuery, Query query) {
-    return new ToChildBlockJoinQuery(query, getFilter(parentListQuery), false);
+    return new ToChildBlockJoinQuery(query, getFilter(parentListQuery));
   }
   @Override
diff --git a/solr/core/src/java/org/apache/solr/search/join/IgnoreAcceptDocsQuery.java b/solr/core/src/java/org/apache/solr/search/join/IgnoreAcceptDocsQuery.java
index 148cc91a053..ebc5c41c7d2 100644
--- a/solr/core/src/java/org/apache/solr/search/join/IgnoreAcceptDocsQuery.java
+++ b/solr/core/src/java/org/apache/solr/search/join/IgnoreAcceptDocsQuery.java
@@ -53,8 +53,8 @@ public class IgnoreAcceptDocsQuery extends Query {
   }
   @Override
-  public Weight createWeight(IndexSearcher searcher) throws IOException {
-    Weight inner = q.createWeight(searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    Weight inner = q.createWeight(searcher, needsScores);
     return new IADWeight(inner);
   }
@@ -62,6 +62,7 @@
     Weight w;
     IADWeight(Weight delegate) {
+      super(q);
       this.w = delegate;
     }
@@ -70,11 +71,6 @@
       return w.explain(context, doc);
     }
-    @Override
-    public Query getQuery() {
-      return q;
-    }
-
     @Override
     public float getValueForNormalization() throws IOException {
       return w.getValueForNormalization();
@@ -86,8 +82,8 @@
     }
     @Override
-    public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
-      return w.scorer(context, null, needsScores);
+    public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
+      return w.scorer(context, null);
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java b/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
index 65544f3d27d..cd84d210294 100644
--- a/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
+++ b/solr/core/src/java/org/apache/solr/update/DeleteByQueryWrapper.java
@@ -64,17 +64,14 @@ final class DeleteByQueryWrapper extends Query {
   }
   @Override
-  public Weight createWeight(IndexSearcher searcher) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
     final LeafReader wrapped = wrap((LeafReader) searcher.getIndexReader());
     final IndexSearcher privateContext = new IndexSearcher(wrapped);
-    final Weight inner = in.createWeight(privateContext);
-    return new Weight() {
+    final Weight inner = in.createWeight(privateContext, needsScores);
+    return new Weight(DeleteByQueryWrapper.this) {
       @Override
       public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); }
-      @Override
-      public Query getQuery() { return DeleteByQueryWrapper.this; }
-
       @Override
       public float getValueForNormalization() throws IOException { return inner.getValueForNormalization(); }
@@ -82,8 +79,8 @@ final class DeleteByQueryWrapper extends Query {
       public void normalize(float norm, float topLevelBoost) { inner.normalize(norm, topLevelBoost); }
       @Override
-      public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
-        return inner.scorer(privateContext.getIndexReader().leaves().get(0), acceptDocs, needsScores);
+      public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
+        return inner.scorer(privateContext.getIndexReader().leaves().get(0), acceptDocs);
       }
     };
   }
diff --git a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
index 2d4c04631c1..34f23c1de2c 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRankQueryPlugin.java
@@ -114,8 +114,8 @@ public class TestRankQueryPlugin extends QParserPlugin {
       return false;
     }
-    public Weight createWeight(IndexSearcher indexSearcher ) throws IOException{
-      return q.createWeight(indexSearcher);
+    public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores) throws IOException{
+      return q.createWeight(indexSearcher, needsScores);
     }
     public void setBoost(float boost) {