LUCENE-6220: Move needsScores to Query.createWeight.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1657874 13f79535-47bb-0310-9956-ffa450edef68
Adrien Grand 2015-02-06 15:54:53 +00:00
parent b2336e3ed5
commit e4436aa7fe
74 changed files with 499 additions and 650 deletions

View File

@ -71,8 +71,8 @@ API Changes
* LUCENE-6217: Add IndexWriter.isOpen and getTragicException. (Simon
Willnauer, Mike McCandless)
* LUCENE-6218: Add Collector.needsScores() and needsScores parameter
to Weight.scorer(). (Robert Muir)
* LUCENE-6218, LUCENE-6220: Add Collector.needsScores() and needsScores
parameter to Query.createWeight(). (Robert Muir, Adrien Grand)
* LUCENE-4524: Merge DocsEnum and DocsAndPositionsEnum into a single
PostingsEnum iterator. TermsEnum.docs() and TermsEnum.docsAndPositions()

View File

@ -174,15 +174,19 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
protected ArrayList<Weight> weights;
protected int maxCoord; // num optional + num required
private final boolean disableCoord;
private final boolean needsScores;
public BooleanWeight(IndexSearcher searcher, boolean disableCoord)
public BooleanWeight(IndexSearcher searcher, boolean needsScores, boolean disableCoord)
throws IOException {
super(BooleanQuery.this);
this.needsScores = needsScores;
this.similarity = searcher.getSimilarity();
this.disableCoord = disableCoord;
weights = new ArrayList<>(clauses.size());
for (int i = 0 ; i < clauses.size(); i++) {
BooleanClause c = clauses.get(i);
Weight w = c.getQuery().createWeight(searcher);
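// a prohibited (MUST_NOT) clause can never contribute to the score, so its sub-weight never needs scores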
final boolean queryNeedsScores = needsScores && c.getOccur() != Occur.MUST_NOT;
Weight w = c.getQuery().createWeight(searcher, queryNeedsScores);
weights.add(w);
if (!c.isProhibited()) {
maxCoord++;
@ -190,9 +194,6 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
}
}
@Override
public Query getQuery() { return BooleanQuery.this; }
@Override
public float getValueForNormalization() throws IOException {
float sum = 0.0f;
@ -242,7 +243,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
for (Iterator<Weight> wIter = weights.iterator(); wIter.hasNext();) {
Weight w = wIter.next();
BooleanClause c = cIter.next();
if (w.scorer(context, context.reader().getLiveDocs(), true) == null) {
if (w.scorer(context, context.reader().getLiveDocs()) == null) {
if (c.isRequired()) {
fail = true;
Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
@ -307,12 +308,12 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
/** Try to build a boolean scorer for this weight. Returns null if {@link BooleanScorer}
* cannot be used. */
// pkg-private for forcing use of BooleanScorer in tests
BooleanScorer booleanScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
BooleanScorer booleanScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
List<BulkScorer> optional = new ArrayList<BulkScorer>();
Iterator<BooleanClause> cIter = clauses.iterator();
for (Weight w : weights) {
BooleanClause c = cIter.next();
BulkScorer subScorer = w.bulkScorer(context, acceptDocs, needsScores);
BulkScorer subScorer = w.bulkScorer(context, acceptDocs);
if (subScorer == null) {
if (c.isRequired()) {
return null;
@ -342,8 +343,8 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
final BooleanScorer bulkScorer = booleanScorer(context, acceptDocs, needsScores);
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
final BooleanScorer bulkScorer = booleanScorer(context, acceptDocs);
if (bulkScorer != null) { // BooleanScorer is applicable
// TODO: what is the right heuristic here?
final long costThreshold;
@ -366,11 +367,11 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
return bulkScorer;
}
}
return super.bulkScorer(context, acceptDocs, needsScores);
return super.bulkScorer(context, acceptDocs);
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
// initially the user provided value,
// but if minNrShouldMatch == optional.size(),
// we will optimize and move these to required, making this 0
@ -382,7 +383,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
Iterator<BooleanClause> cIter = clauses.iterator();
for (Weight w : weights) {
BooleanClause c = cIter.next();
Scorer subScorer = w.scorer(context, acceptDocs, needsScores && c.isProhibited() == false);
Scorer subScorer = w.scorer(context, acceptDocs);
if (subScorer == null) {
if (c.isRequired()) {
return null;
@ -532,8 +533,8 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BooleanWeight(searcher, disableCoord);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(searcher, needsScores, disableCoord);
}
@Override

View File

@ -110,12 +110,8 @@ public class ConstantScoreQuery extends Query {
private float queryWeight;
public ConstantWeight(IndexSearcher searcher) throws IOException {
this.innerWeight = (query == null) ? null : query.createWeight(searcher);
}
@Override
public Query getQuery() {
return ConstantScoreQuery.this;
super(ConstantScoreQuery.this);
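// every match gets the same constant score, so the wrapped query's own scores are never needed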
this.innerWeight = (query == null) ? null : query.createWeight(searcher, false);
}
@Override
@ -135,13 +131,13 @@ public class ConstantScoreQuery extends Query {
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
if (filter != null) {
assert query == null;
return super.bulkScorer(context, acceptDocs, needsScores);
return super.bulkScorer(context, acceptDocs);
} else {
assert query != null && innerWeight != null;
BulkScorer bulkScorer = innerWeight.bulkScorer(context, acceptDocs, false);
BulkScorer bulkScorer = innerWeight.bulkScorer(context, acceptDocs);
if (bulkScorer == null) {
return null;
}
@ -150,7 +146,7 @@ public class ConstantScoreQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
if (filter != null) {
assert query == null;
final DocIdSet dis = filter.getDocIdSet(context, acceptDocs);
@ -163,7 +159,7 @@ public class ConstantScoreQuery extends Query {
return new ConstantDocIdSetIteratorScorer(disi, this, queryWeight);
} else {
assert query != null && innerWeight != null;
Scorer scorer = innerWeight.scorer(context, acceptDocs, false);
Scorer scorer = innerWeight.scorer(context, acceptDocs);
if (scorer == null) {
return null;
}
@ -175,7 +171,7 @@ public class ConstantScoreQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
final Scorer cs = scorer(context, context.reader().getLiveDocs(), true);
final Scorer cs = scorer(context, context.reader().getLiveDocs());
final boolean exists = (cs != null && cs.advance(doc) == doc);
final ComplexExplanation result = new ComplexExplanation();
@ -331,7 +327,7 @@ public class ConstantScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ConstantScoreQuery.ConstantWeight(searcher);
}

View File

@ -118,16 +118,13 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
protected ArrayList<Weight> weights = new ArrayList<>(); // The Weights for our subqueries, in 1-1 correspondence with disjuncts
/** Construct the Weight for this Query searched by searcher. Recursively construct subquery weights. */
public DisjunctionMaxWeight(IndexSearcher searcher) throws IOException {
public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
super(DisjunctionMaxQuery.this);
for (Query disjunctQuery : disjuncts) {
weights.add(disjunctQuery.createWeight(searcher));
weights.add(disjunctQuery.createWeight(searcher, needsScores));
}
}
/** Return our associated DisjunctionMaxQuery */
@Override
public Query getQuery() { return DisjunctionMaxQuery.this; }
/** Compute the sum of squared weights of us applied to our subqueries. Used for normalization. */
@Override
public float getValueForNormalization() throws IOException {
@ -153,11 +150,11 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/** Create the scorer used to score our associated DisjunctionMaxQuery */
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
List<Scorer> scorers = new ArrayList<>();
for (Weight w : weights) {
// we will advance() subscorers
Scorer subScorer = w.scorer(context, acceptDocs, needsScores);
Scorer subScorer = w.scorer(context, acceptDocs);
if (subScorer != null) {
scorers.add(subScorer);
}
@ -197,8 +194,8 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/** Create the Weight used to score us */
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new DisjunctionMaxWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new DisjunctionMaxWeight(searcher, needsScores);
}
/** Optimize our representation and our subqueries representations

View File

@ -78,9 +78,9 @@ public class FilteredQuery extends Query {
* This is accomplished by overriding the Scorer returned by the Weight.
*/
@Override
public Weight createWeight(final IndexSearcher searcher) throws IOException {
final Weight weight = query.createWeight (searcher);
return new Weight() {
public Weight createWeight(final IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight weight = query.createWeight (searcher, needsScores);
return new Weight(FilteredQuery.this) {
@Override
public float getValueForNormalization() throws IOException {
@ -111,15 +111,9 @@ public class FilteredQuery extends Query {
}
}
// return this query
@Override
public Query getQuery() {
return FilteredQuery.this;
}
// return a filtering scorer
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assert filter != null;
DocIdSet filterDocIdSet = filter.getDocIdSet(context, acceptDocs);
@ -128,12 +122,12 @@ public class FilteredQuery extends Query {
return null;
}
return strategy.filteredScorer(context, weight, filterDocIdSet, needsScores);
return strategy.filteredScorer(context, weight, filterDocIdSet);
}
// return a filtering top scorer
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assert filter != null;
DocIdSet filterDocIdSet = filter.getDocIdSet(context, acceptDocs);
@ -443,7 +437,7 @@ public class FilteredQuery extends Query {
* @throws IOException if an {@link IOException} occurs
*/
public abstract Scorer filteredScorer(LeafReaderContext context,
Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException;
Weight weight, DocIdSet docIdSet) throws IOException;
/**
* Returns a filtered {@link BulkScorer} based on this
@ -459,7 +453,7 @@ public class FilteredQuery extends Query {
*/
public BulkScorer filteredBulkScorer(LeafReaderContext context,
Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException {
Scorer scorer = filteredScorer(context, weight, docIdSet, needsScores);
Scorer scorer = filteredScorer(context, weight, docIdSet);
if (scorer == null) {
return null;
}
@ -481,7 +475,7 @@ public class FilteredQuery extends Query {
public static class RandomAccessFilterStrategy extends FilterStrategy {
@Override
public Scorer filteredScorer(LeafReaderContext context, Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException {
public Scorer filteredScorer(LeafReaderContext context, Weight weight, DocIdSet docIdSet) throws IOException {
final DocIdSetIterator filterIter = docIdSet.iterator();
if (filterIter == null) {
// this means the filter does not accept any documents.
@ -493,11 +487,11 @@ public class FilteredQuery extends Query {
final boolean useRandomAccess = filterAcceptDocs != null && useRandomAccess(filterAcceptDocs, filterIter.cost());
if (useRandomAccess) {
// if we are using random access, we return the inner scorer, just with other acceptDocs
return weight.scorer(context, filterAcceptDocs, needsScores);
return weight.scorer(context, filterAcceptDocs);
} else {
// we are gonna advance() this scorer, so we set inorder=true/toplevel=false
// we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice
final Scorer scorer = weight.scorer(context, null, needsScores);
final Scorer scorer = weight.scorer(context, null);
return (scorer == null) ? null : new LeapFrogScorer(weight, filterIter, scorer, scorer);
}
}
@ -530,14 +524,14 @@ public class FilteredQuery extends Query {
@Override
public Scorer filteredScorer(LeafReaderContext context,
Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException {
Weight weight, DocIdSet docIdSet) throws IOException {
final DocIdSetIterator filterIter = docIdSet.iterator();
if (filterIter == null) {
// this means the filter does not accept any documents.
return null;
}
// we pass null as acceptDocs, as our filter has already respected acceptDocs, no need to do twice
final Scorer scorer = weight.scorer(context, null, needsScores);
final Scorer scorer = weight.scorer(context, null);
if (scorer == null) {
return null;
}
@ -566,14 +560,14 @@ public class FilteredQuery extends Query {
private static final class QueryFirstFilterStrategy extends FilterStrategy {
@Override
public Scorer filteredScorer(final LeafReaderContext context,
Weight weight, DocIdSet docIdSet, boolean needsScores) throws IOException {
Weight weight, DocIdSet docIdSet) throws IOException {
Bits filterAcceptDocs = docIdSet.bits();
if (filterAcceptDocs == null) {
// Filter does not provide random-access Bits; we
// must fallback to leapfrog:
return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredScorer(context, weight, docIdSet, needsScores);
return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredScorer(context, weight, docIdSet);
}
final Scorer scorer = weight.scorer(context, null, needsScores);
final Scorer scorer = weight.scorer(context, null);
return scorer == null ? null : new QueryFirstScorer(weight, filterAcceptDocs, scorer);
}
@ -586,7 +580,7 @@ public class FilteredQuery extends Query {
// must fallback to leapfrog:
return LEAP_FROG_QUERY_FIRST_STRATEGY.filteredBulkScorer(context, weight, docIdSet, needsScores);
}
final Scorer scorer = weight.scorer(context, null, needsScores);
final Scorer scorer = weight.scorer(context, null);
return scorer == null ? null : new QueryFirstBulkScorer(scorer, filterAcceptDocs);
}
}

View File

@ -226,8 +226,53 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, int n) throws IOException {
return search(createNormalizedWeight(query), after, n);
public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException {
final int limit = Math.max(1, reader.maxDoc());
if (after != null && after.doc >= limit) {
throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
+ after.doc + " limit=" + limit);
}
numHits = Math.min(numHits, limit);
if (executor == null) {
final TopScoreDocCollector collector = TopScoreDocCollector.create(numHits, after);
search(query, collector);
return collector.topDocs();
} else {
final TopScoreDocCollector[] collectors = new TopScoreDocCollector[leafSlices.length];
boolean needsScores = false;
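// a single Weight is shared across all slices below, so it must compute scores if any per-slice collector asks for them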
for (int i = 0; i < leafSlices.length; ++i) {
collectors[i] = TopScoreDocCollector.create(numHits, after);
needsScores |= collectors[i].needsScores();
}
final Weight weight = createNormalizedWeight(query, needsScores);
final List<Future<TopDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; ++i) {
final LeafReaderContext[] leaves = leafSlices[i].leaves;
final TopScoreDocCollector collector = collectors[i];
topDocsFutures.add(executor.submit(new Callable<TopDocs>() {
@Override
public TopDocs call() throws Exception {
search(Arrays.asList(leaves), weight, collector);
return collector.topDocs();
}
}));
}
final TopDocs[] topDocs = new TopDocs[leafSlices.length];
for (int i = 0; i < topDocs.length; ++i) {
try {
topDocs[i] = topDocsFutures.get(i).get();
} catch (InterruptedException e) {
throw new ThreadInterruptedException(e);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
return TopDocs.merge(numHits, topDocs);
}
}
/** Finds the top <code>n</code>
@ -242,7 +287,7 @@ public class IndexSearcher {
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n) throws IOException {
return search(createNormalizedWeight(wrapFilter(query, filter)), after, n);
return searchAfter(after, wrapFilter(query, filter), n);
}
/** Finds the top <code>n</code>
@ -253,7 +298,7 @@ public class IndexSearcher {
*/
public TopDocs search(Query query, int n)
throws IOException {
return search(query, null, n);
return searchAfter(null, query, n);
}
@ -265,7 +310,7 @@ public class IndexSearcher {
*/
public TopDocs search(Query query, Filter filter, int n)
throws IOException {
return search(createNormalizedWeight(wrapFilter(query, filter)), null, n);
return search(wrapFilter(query, filter), n);
}
/** Lower-level search API.
@ -281,7 +326,7 @@ public class IndexSearcher {
*/
public void search(Query query, Filter filter, Collector results)
throws IOException {
search(leafContexts, createNormalizedWeight(wrapFilter(query, filter)), results);
search(wrapFilter(query, filter), results);
}
/** Lower-level search API.
@ -293,7 +338,7 @@ public class IndexSearcher {
*/
public void search(Query query, Collector results)
throws IOException {
search(leafContexts, createNormalizedWeight(query), results);
search(leafContexts, createNormalizedWeight(query, results.needsScores()), results);
}
/** Search implementation with arbitrary sorting. Finds
@ -310,7 +355,7 @@ public class IndexSearcher {
*/
public TopFieldDocs search(Query query, Filter filter, int n,
Sort sort) throws IOException {
return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, false, false);
return search(query, filter, n, sort, false, false);
}
/** Search implementation with arbitrary sorting, plus
@ -329,7 +374,7 @@ public class IndexSearcher {
*/
public TopFieldDocs search(Query query, Filter filter, int n,
Sort sort, boolean doDocScores, boolean doMaxScore) throws IOException {
return search(createNormalizedWeight(wrapFilter(query, filter)), n, sort, doDocScores, doMaxScore);
return searchAfter(null, query, filter, n, sort, doDocScores, doMaxScore);
}
/** Finds the top <code>n</code>
@ -343,13 +388,8 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true, false, false);
public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort) throws IOException {
return searchAfter(after, query, filter, n, sort, false, false);
}
/**
@ -362,7 +402,7 @@ public class IndexSearcher {
*/
public TopFieldDocs search(Query query, int n,
Sort sort) throws IOException {
return search(createNormalizedWeight(query), n, sort, false, false);
return search(query, null, n, sort, false, false);
}
/** Finds the top <code>n</code>
@ -377,12 +417,7 @@ public class IndexSearcher {
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, int n, Sort sort) throws IOException {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
return search(createNormalizedWeight(query), (FieldDoc) after, n, sort, true, false, false);
return searchAfter(after, query, null, n, sort, false, false);
}
/** Finds the top <code>n</code>
@ -401,123 +436,54 @@ public class IndexSearcher {
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
public TopDocs searchAfter(ScoreDoc after, Query query, Filter filter, int n, Sort sort,
boolean doDocScores, boolean doMaxScore) throws IOException {
public TopFieldDocs searchAfter(ScoreDoc after, Query query, Filter filter, int numHits, Sort sort,
boolean doDocScores, boolean doMaxScore) throws IOException {
if (after != null && !(after instanceof FieldDoc)) {
// TODO: if we fix type safety of TopFieldDocs we can
// remove this
throw new IllegalArgumentException("after must be a FieldDoc; got " + after);
}
return search(createNormalizedWeight(wrapFilter(query, filter)), (FieldDoc) after, n, sort, true,
doDocScores, doMaxScore);
return searchAfter((FieldDoc) after, wrapFilter(query, filter), numHits, sort, doDocScores, doMaxScore);
}
/** Expert: Low-level search implementation. Finds the top <code>n</code>
* hits for <code>query</code>, applying <code>filter</code> if non-null.
*
* <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
* {@link IndexSearcher#search(Query,Filter,int)} instead.
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
protected TopDocs search(Weight weight, ScoreDoc after, int nDocs) throws IOException {
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
private TopFieldDocs searchAfter(FieldDoc after, Query query, int numHits, Sort sort,
boolean doDocScores, boolean doMaxScore) throws IOException {
final int limit = Math.max(1, reader.maxDoc());
if (after != null && after.doc >= limit) {
throw new IllegalArgumentException("after.doc exceeds the number of documents in the reader: after.doc="
+ after.doc + " limit=" + limit);
}
nDocs = Math.min(nDocs, limit);
numHits = Math.min(numHits, limit);
final boolean fillFields = true;
if (executor == null) {
return search(leafContexts, weight, after, nDocs);
final TopFieldCollector collector = TopFieldCollector.create(sort, numHits, after, fillFields, doDocScores, doMaxScore);
search(query, collector);
return collector.topDocs();
} else {
final List<Future<TopDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
topDocsFutures.add(executor.submit(new SearcherCallableNoSort(this, leafSlices[i], weight, after, nDocs)));
final TopFieldCollector[] collectors = new TopFieldCollector[leafSlices.length];
boolean needsScores = false;
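// as in the unsorted path: one collector per slice, and the shared Weight needs scores if any of them does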
for (int i = 0; i < leafSlices.length; ++i) {
collectors[i] = TopFieldCollector.create(sort, numHits, after, fillFields, doDocScores, doMaxScore);
needsScores |= collectors[i].needsScores();
}
final TopDocs[] topDocs = new TopDocs[leafSlices.length];
for (int i = 0; i < leafSlices.length; i++) {
try {
topDocs[i] = topDocsFutures.get(i).get();
} catch (InterruptedException e) {
throw new ThreadInterruptedException(e);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
}
return TopDocs.merge(null, nDocs, topDocs);
}
}
/** Expert: Low-level search implementation. Finds the top <code>n</code>
* hits for <code>query</code>.
*
* <p>Applications should usually call {@link IndexSearcher#search(Query,int)} or
* {@link IndexSearcher#search(Query,Filter,int)} instead.
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
protected TopDocs search(List<LeafReaderContext> leaves, Weight weight, ScoreDoc after, int nDocs) throws IOException {
// single thread
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
nDocs = Math.min(nDocs, limit);
TopScoreDocCollector collector = TopScoreDocCollector.create(nDocs, after);
search(leaves, weight, collector);
return collector.topDocs();
}
/** Expert: Low-level search implementation with arbitrary
* sorting and control over whether hit scores and max
* score should be computed. Finds
* the top <code>n</code> hits for <code>query</code> and sorting the hits
* by the criteria in <code>sort</code>.
*
* <p>Applications should usually call {@link
* IndexSearcher#search(Query,Filter,int,Sort)} instead.
*
* @throws BooleanQuery.TooManyClauses If a query would exceed
* {@link BooleanQuery#getMaxClauseCount()} clauses.
*/
protected TopFieldDocs search(Weight weight,
final int nDocs, Sort sort,
boolean doDocScores, boolean doMaxScore) throws IOException {
return search(weight, null, nDocs, sort, true, doDocScores, doMaxScore);
}
/**
* Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
* whether or not the fields in the returned {@link FieldDoc} instances should
* be set by specifying fillFields.
*/
protected TopFieldDocs search(Weight weight, FieldDoc after, int nDocs,
Sort sort, boolean fillFields,
boolean doDocScores, boolean doMaxScore)
throws IOException {
if (sort == null) throw new NullPointerException("Sort must not be null");
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
nDocs = Math.min(nDocs, limit);
if (executor == null) {
// use all leaves here!
return search(leafContexts, weight, after, nDocs, sort, fillFields, doDocScores, doMaxScore);
} else {
final Weight weight = createNormalizedWeight(query, needsScores);
final List<Future<TopFieldDocs>> topDocsFutures = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; i++) { // search each leaf slice
topDocsFutures.add(executor.submit(new SearcherCallableWithSort(this, leafSlices[i], weight, after, nDocs, sort, doDocScores, doMaxScore)));
for (int i = 0; i < leafSlices.length; ++i) {
final LeafReaderContext[] leaves = leafSlices[i].leaves;
final TopFieldCollector collector = collectors[i];
topDocsFutures.add(executor.submit(new Callable<TopFieldDocs>() {
@Override
public TopFieldDocs call() throws Exception {
search(Arrays.asList(leaves), weight, collector);
return collector.topDocs();
}
}));
}
final TopFieldDocs[] topDocs = new TopFieldDocs[leafSlices.length];
for (int i = 0; i < leafSlices.length; i++) {
for (int i = 0; i < topDocs.length; ++i) {
try {
topDocs[i] = topDocsFutures.get(i).get();
} catch (InterruptedException e) {
@ -526,30 +492,9 @@ public class IndexSearcher {
throw new RuntimeException(e);
}
}
return (TopFieldDocs) TopDocs.merge(sort, nDocs, topDocs);
}
}
/**
* Just like {@link #search(Weight, int, Sort, boolean, boolean)}, but you choose
* whether or not the fields in the returned {@link FieldDoc} instances should
* be set by specifying fillFields.
*/
protected TopFieldDocs search(List<LeafReaderContext> leaves, Weight weight, FieldDoc after, int nDocs,
Sort sort, boolean fillFields, boolean doDocScores, boolean doMaxScore) throws IOException {
// single thread
int limit = reader.maxDoc();
if (limit == 0) {
limit = 1;
}
nDocs = Math.min(nDocs, limit);
TopFieldCollector collector = TopFieldCollector.create(sort, nDocs, after,
fillFields, doDocScores,
doMaxScore);
search(leaves, weight, collector);
return (TopFieldDocs) collector.topDocs();
return TopDocs.merge(sort, numHits, topDocs);
}
}
/**
@ -586,7 +531,7 @@ public class IndexSearcher {
// continue with the following leaf
continue;
}
BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs(), collector.needsScores());
BulkScorer scorer = weight.bulkScorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
try {
scorer.score(leafCollector);
@ -620,7 +565,7 @@ public class IndexSearcher {
* entire index.
*/
public Explanation explain(Query query, int doc) throws IOException {
return explain(createNormalizedWeight(query), doc);
return explain(createNormalizedWeight(query, true), doc);
}
/** Expert: low-level implementation method
@ -650,9 +595,9 @@ public class IndexSearcher {
* can then directly be used to get a {@link Scorer}.
* @lucene.internal
*/
public Weight createNormalizedWeight(Query query) throws IOException {
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
query = rewrite(query);
Weight weight = query.createWeight(this);
Weight weight = query.createWeight(this, needsScores);
float v = weight.getValueForNormalization();
float norm = getSimilarity().queryNorm(v);
if (Float.isInfinite(norm) || Float.isNaN(norm)) {
@ -671,68 +616,6 @@ public class IndexSearcher {
return readerContext;
}
/**
* A thread subclass for searching a single searchable
*/
private static final class SearcherCallableNoSort implements Callable<TopDocs> {
private final IndexSearcher searcher;
private final Weight weight;
private final ScoreDoc after;
private final int nDocs;
private final LeafSlice slice;
public SearcherCallableNoSort(IndexSearcher searcher, LeafSlice slice, Weight weight,
ScoreDoc after, int nDocs) {
this.searcher = searcher;
this.weight = weight;
this.after = after;
this.nDocs = nDocs;
this.slice = slice;
}
@Override
public TopDocs call() throws IOException {
return searcher.search(Arrays.asList(slice.leaves), weight, after, nDocs);
}
}
/**
* A thread subclass for searching a single searchable
*/
private static final class SearcherCallableWithSort implements Callable<TopFieldDocs> {
private final IndexSearcher searcher;
private final Weight weight;
private final int nDocs;
private final Sort sort;
private final LeafSlice slice;
private final FieldDoc after;
private final boolean doDocScores;
private final boolean doMaxScore;
public SearcherCallableWithSort(IndexSearcher searcher, LeafSlice slice, Weight weight,
FieldDoc after, int nDocs, Sort sort,
boolean doDocScores, boolean doMaxScore) {
this.searcher = searcher;
this.weight = weight;
this.nDocs = nDocs;
this.sort = sort;
this.slice = slice;
this.after = after;
this.doDocScores = doDocScores;
this.doMaxScore = doMaxScore;
}
@Override
public TopFieldDocs call() throws IOException {
assert slice.leaves.length == 1;
return searcher.search(Arrays.asList(slice.leaves),
weight, after, nDocs, sort, true, doDocScores, doMaxScore);
}
}
/**
* A class holding a subset of the {@link IndexSearcher}s leaf contexts to be
* executed within a single thread.

View File

@ -110,6 +110,7 @@ public class MatchAllDocsQuery extends Query {
private float queryNorm;
public MatchAllDocsWeight(IndexSearcher searcher) {
super(MatchAllDocsQuery.this);
}
@Override
@ -117,11 +118,6 @@ public class MatchAllDocsQuery extends Query {
return "weight(" + MatchAllDocsQuery.this + ")";
}
@Override
public Query getQuery() {
return MatchAllDocsQuery.this;
}
@Override
public float getValueForNormalization() {
queryWeight = getBoost();
@ -135,7 +131,7 @@ public class MatchAllDocsQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new MatchAllScorer(context.reader(), acceptDocs, this, queryWeight);
}
@ -154,7 +150,7 @@ public class MatchAllDocsQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
return new MatchAllDocsWeight(searcher);
}

View File

@ -141,9 +141,12 @@ public class MultiPhraseQuery extends Query {
private final Similarity similarity;
private final Similarity.SimWeight stats;
private final Map<Term,TermContext> termContexts = new HashMap<>();
private final boolean needsScores;
public MultiPhraseWeight(IndexSearcher searcher)
public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores)
throws IOException {
super(MultiPhraseQuery.this);
this.needsScores = needsScores;
this.similarity = searcher.getSimilarity();
final IndexReaderContext context = searcher.getTopReaderContext();
@ -164,9 +167,6 @@ public class MultiPhraseQuery extends Query {
allTermStats.toArray(new TermStatistics[allTermStats.size()]));
}
@Override
public Query getQuery() { return MultiPhraseQuery.this; }
@Override
public float getValueForNormalization() {
return stats.getValueForNormalization();
@ -178,7 +178,7 @@ public class MultiPhraseQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assert !termArrays.isEmpty();
final LeafReader reader = context.reader();
final Bits liveDocs = acceptDocs;
@ -256,7 +256,7 @@ public class MultiPhraseQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
Scorer scorer = scorer(context, context.reader().getLiveDocs(), true);
Scorer scorer = scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {
@ -296,8 +296,8 @@ public class MultiPhraseQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new MultiPhraseWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new MultiPhraseWeight(searcher, needsScores);
}
/** Prints a user-readable version of this query. */

View File

@ -211,10 +211,13 @@ public class PhraseQuery extends Query {
private class PhraseWeight extends Weight {
private final Similarity similarity;
private final Similarity.SimWeight stats;
private final boolean needsScores;
private transient TermContext states[];
public PhraseWeight(IndexSearcher searcher)
public PhraseWeight(IndexSearcher searcher, boolean needsScores)
throws IOException {
super(PhraseQuery.this);
this.needsScores = needsScores;
this.similarity = searcher.getSimilarity();
final IndexReaderContext context = searcher.getTopReaderContext();
states = new TermContext[terms.size()];
@ -230,9 +233,6 @@ public class PhraseQuery extends Query {
@Override
public String toString() { return "weight(" + PhraseQuery.this + ")"; }
@Override
public Query getQuery() { return PhraseQuery.this; }
@Override
public float getValueForNormalization() {
return stats.getValueForNormalization();
@ -244,7 +244,7 @@ public class PhraseQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assert !terms.isEmpty();
final LeafReader reader = context.reader();
final Bits liveDocs = acceptDocs;
@ -297,7 +297,7 @@ public class PhraseQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
Scorer scorer = scorer(context, context.reader().getLiveDocs(), true);
Scorer scorer = scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {
@ -318,8 +318,8 @@ public class PhraseQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new PhraseWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new PhraseWeight(searcher, needsScores);
}
/**

View File

@ -18,7 +18,6 @@ package org.apache.lucene.search;
*/
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.IndexReader;
@ -72,11 +71,13 @@ public abstract class Query implements Cloneable {
/**
* Expert: Constructs an appropriate Weight implementation for this query.
*
* <p>
* Only implemented by primitive queries, which re-write to themselves.
*
* @param needsScores True if document scores ({@link Scorer#score}) or match
* frequencies ({@link Scorer#freq}) are needed.
*/
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
throw new UnsupportedOperationException("Query " + this + " does not implement createWeight");
}
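A minimal sketch of how a caller drives the new signature (hypothetical collector, query and leafContext variables), mirroring the IndexSearcher.createNormalizedWeight change earlier in this commit: scores are only requested when the consumer actually needs them.

boolean needsScores = collector.needsScores(); // e.g. false for pure counting or filtering
Weight weight = searcher.createNormalizedWeight(query, needsScores);
Scorer scorer = weight.scorer(leafContext, leafContext.reader().getLiveDocs());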

View File

@ -61,7 +61,7 @@ public abstract class QueryRescorer extends Rescorer {
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
Weight weight = searcher.createNormalizedWeight(query);
Weight weight = searcher.createNormalizedWeight(query, true);
// Now merge sort docIDs from hits, with reader's leaves:
int hitUpto = 0;
@ -83,7 +83,7 @@ public abstract class QueryRescorer extends Rescorer {
if (readerContext != null) {
// We advanced to another segment:
docBase = readerContext.docBase;
scorer = weight.scorer(readerContext, null, true);
scorer = weight.scorer(readerContext, null);
}
if(scorer != null) {

View File

@ -53,11 +53,11 @@ public class QueryWrapperFilter extends Filter {
public DocIdSet getDocIdSet(final LeafReaderContext context, final Bits acceptDocs) throws IOException {
// get a private context that is used to rewrite, createWeight and score eventually
final LeafReaderContext privateContext = context.reader().getContext();
final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query);
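// needsScores=false: a filter only needs the matching doc ids, never their scores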
final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, false);
return new DocIdSet() {
@Override
public DocIdSetIterator iterator() throws IOException {
return weight.scorer(privateContext, acceptDocs, false);
return weight.scorer(privateContext, acceptDocs);
}
@Override

View File

@ -47,9 +47,12 @@ public class TermQuery extends Query {
private final Similarity similarity;
private final Similarity.SimWeight stats;
private final TermContext termStates;
public TermWeight(IndexSearcher searcher, TermContext termStates)
private final boolean needsScores;
public TermWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates)
throws IOException {
super(TermQuery.this);
this.needsScores = needsScores;
assert termStates != null : "TermContext must not be null";
this.termStates = termStates;
this.similarity = searcher.getSimilarity();
@ -63,11 +66,6 @@ public class TermQuery extends Query {
return "weight(" + TermQuery.this + ")";
}
@Override
public Query getQuery() {
return TermQuery.this;
}
@Override
public float getValueForNormalization() {
return stats.getValueForNormalization();
@ -79,7 +77,7 @@ public class TermQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);
final TermsEnum termsEnum = getTermsEnum(context);
if (termsEnum == null) {
@ -117,7 +115,7 @@ public class TermQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
Scorer scorer = scorer(context, context.reader().getLiveDocs(), true);
Scorer scorer = scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {
@ -170,7 +168,7 @@ public class TermQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final IndexReaderContext context = searcher.getTopReaderContext();
final TermContext termState;
if (perReaderTermState == null
@ -186,7 +184,7 @@ public class TermQuery extends Query {
// we must not ignore the given docFreq - if set use the given value (lie)
if (docFreq != -1) termState.setDocFreq(docFreq);
return new TermWeight(searcher, termState);
return new TermWeight(searcher, needsScores, termState);
}
@Override

View File

@ -158,7 +158,6 @@ public class TopDocs {
// Returns true if first is < second
@Override
@SuppressWarnings({"unchecked","rawtypes"})
public boolean lessThan(ShardRef first, ShardRef second) {
assert first != second;
final FieldDoc firstFD = (FieldDoc) shardHits[first.shardIndex][first.hitIndex];
@ -195,25 +194,48 @@ public class TopDocs {
}
/** Returns a new TopDocs, containing topN results across
* the provided TopDocs, sorting by the specified {@link
* the provided TopDocs, sorting by score. Each {@link TopDocs}
* instance must be sorted.
* @lucene.experimental */
public static TopDocs merge(int topN, TopDocs[] shardHits) throws IOException {
return merge(0, topN, shardHits);
}
/**
* Same as {@link #merge(int, TopDocs[])} but also ignores the top
* {@code start} top docs. This is typically useful for pagination.
* @lucene.experimental
*/
public static TopDocs merge(int start, int topN, TopDocs[] shardHits) throws IOException {
return mergeAux(null, start, topN, shardHits);
}
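For instance (hypothetical paging values), a caller could fetch the third page of 10 score-sorted hits merged across shards:

TopDocs thirdPage = TopDocs.merge(20, 10, shardHits); // skip the top 20 hits, return the next 10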
/** Returns a new TopFieldDocs, containing topN results across
* the provided TopFieldDocs, sorting by the specified {@link
* Sort}. Each of the TopDocs must have been sorted by
* the same Sort, and sort field values must have been
* filled (ie, <code>fillFields=true</code> must be
* passed to {@link
* TopFieldCollector#create}.
*
* <p>Pass sort=null to merge sort by score descending.
*
* passed to {@link TopFieldCollector#create}).
* @lucene.experimental */
public static TopDocs merge(Sort sort, int topN, TopDocs[] shardHits) throws IOException {
public static TopFieldDocs merge(Sort sort, int topN, TopFieldDocs[] shardHits) throws IOException {
return merge(sort, 0, topN, shardHits);
}
/**
* Same as {@link #merge(Sort, int, TopDocs[])} but also slices the result at the same time based
* on the provided start and size. The return TopDocs will always have a scoreDocs with length of at most size.
* Same as {@link #merge(Sort, int, TopFieldDocs[])} but also ignores the top
* {@code start} top docs. This is typically useful for pagination.
* @lucene.experimental
*/
public static TopDocs merge(Sort sort, int start, int size, TopDocs[] shardHits) throws IOException {
public static TopFieldDocs merge(Sort sort, int start, int topN, TopFieldDocs[] shardHits) throws IOException {
if (sort == null) {
throw new IllegalArgumentException("sort must be non-null when merging field-docs");
}
return (TopFieldDocs) mergeAux(sort, start, topN, shardHits);
}
/** Auxiliary method used by the {@link #merge} impls. A sort value of null
* is used to indicate that docs should be sorted by score. */
private static TopDocs mergeAux(Sort sort, int start, int size, TopDocs[] shardHits) throws IOException {
final PriorityQueue<ShardRef> queue;
if (sort == null) {
queue = new ScoreMergeSortQueue(shardHits);

View File

@ -698,4 +698,8 @@ public abstract class TopFieldCollector extends TopDocsCollector<Entry> {
return new TopFieldDocs(totalHits, results, ((FieldValueHitQueue<Entry>) pq).getFields(), maxScore);
}
@Override
public TopFieldDocs topDocs() {
return (TopFieldDocs) super.topDocs();
}
}

View File

@ -34,14 +34,14 @@ import org.apache.lucene.util.Bits;
* {@link org.apache.lucene.index.LeafReader} dependent state should reside in the {@link Scorer}.
* <p>
* Since {@link Weight} creates {@link Scorer} instances for a given
* {@link org.apache.lucene.index.LeafReaderContext} ({@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits, boolean)})
* {@link org.apache.lucene.index.LeafReaderContext} ({@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits)})
* callers must maintain the relationship between the searcher's top-level
* {@link IndexReaderContext} and the context used to create a {@link Scorer}.
* <p>
* A <code>Weight</code> is used in the following way:
* <ol>
* <li>A <code>Weight</code> is constructed by a top-level query, given a
* <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher)}).
* <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean)}).
* <li>The {@link #getValueForNormalization()} method is called on the
* <code>Weight</code> to compute the query normalization factor
* {@link Similarity#queryNorm(float)} of the query clauses contained in the
@ -49,13 +49,22 @@ import org.apache.lucene.util.Bits;
* <li>The query normalization factor is passed to {@link #normalize(float, float)}. At
* this point the weighting is complete.
* <li>A <code>Scorer</code> is constructed by
* {@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits, boolean)}.
* {@link #scorer(org.apache.lucene.index.LeafReaderContext, Bits)}.
* </ol>
*
* @since 2.9
*/
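The four steps above as a minimal sketch (hypothetical searcher, query, leafContext and acceptDocs variables); IndexSearcher.createNormalizedWeight, shown earlier in this commit, performs steps 1-3 internally:

Weight weight = query.createWeight(searcher, needsScores);     // 1. construct
float v = weight.getValueForNormalization();                   // 2. normalization factor
weight.normalize(searcher.getSimilarity().queryNorm(v), 1.0f); // 3. normalize
Scorer scorer = weight.scorer(leafContext, acceptDocs);        // 4. per-leaf scorer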
public abstract class Weight {
protected final Query parentQuery;
/** Sole constructor, typically invoked by sub-classes.
* @param query the parent query
*/
protected Weight(Query query) {
this.parentQuery = query;
}
/**
* An explanation of the score computation for the named document.
*
@ -67,7 +76,9 @@ public abstract class Weight {
public abstract Explanation explain(LeafReaderContext context, int doc) throws IOException;
/** The query that this concerns. */
public abstract Query getQuery();
public final Query getQuery() {
return parentQuery;
}
/** The value for normalization of contained query clauses (e.g. sum of squared weights). */
public abstract float getValueForNormalization() throws IOException;
@ -87,13 +98,11 @@ public abstract class Weight {
* @param acceptDocs
* Bits that represent the allowable docs to match (typically deleted docs
* but possibly filtering other documents)
* @param needsScores
* True if document scores ({@link Scorer#score}) or match frequencies ({@link Scorer#freq}) are needed.
*
* @return a {@link Scorer} which scores documents in/out-of order.
* @throws IOException if there is a low-level I/O error
*/
public abstract Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException;
public abstract Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException;
/**
* Optional method, to return a {@link BulkScorer} to
@ -108,16 +117,14 @@ public abstract class Weight {
* @param acceptDocs
* Bits that represent the allowable docs to match (typically deleted docs
* but possibly filtering other documents)
* @param needsScores
* True if document scores are needed.
*
* @return a {@link BulkScorer} which scores documents and
* passes them to a collector.
* @throws IOException if there is a low-level I/O error
*/
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer scorer = scorer(context, acceptDocs, needsScores);
Scorer scorer = scorer(context, acceptDocs);
if (scorer == null) {
// No docs match
return null;

View File

@ -389,7 +389,7 @@ on the built-in available scoring models and extending or changing Similarity.
{@link org.apache.lucene.search.Query Query} class has several methods that are important for
derived classes:
<ol>
<li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher) createWeight(IndexSearcher searcher)} &mdash; A
<li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)} &mdash; A
{@link org.apache.lucene.search.Weight Weight} is the internal representation of the
Query, so each Query implementation must
provide an implementation of Weight. See the subsection on <a
@ -398,7 +398,7 @@ on the built-in available scoring models and extending or changing Similarity.
<li>{@link org.apache.lucene.search.Query#rewrite(IndexReader) rewrite(IndexReader reader)} &mdash; Rewrites queries into primitive queries. Primitive queries are:
{@link org.apache.lucene.search.TermQuery TermQuery},
{@link org.apache.lucene.search.BooleanQuery BooleanQuery}, <span
>and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher) createWeight(IndexSearcher searcher)}</span></li>
>and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)}</span></li>
</ol>
</p>
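A minimal sketch (hypothetical class names, not from this patch) of a primitive query implementing the new two-argument method:

public class MyPrimitiveQuery extends Query {
  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    // propagate needsScores so the Weight can skip score computation when it is not required
    return new MyPrimitiveWeight(this, searcher, needsScores); // hypothetical Weight subclass
  }
}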
<a name="weightClass"></a>
@ -546,8 +546,8 @@ on the built-in available scoring models and extending or changing Similarity.
<p>Assuming we are not sorting (since sorting doesn't affect the raw Lucene score),
we call one of the search methods of the IndexSearcher, passing in the
{@link org.apache.lucene.search.Weight Weight} object created by
{@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query)
IndexSearcher.createNormalizedWeight(Query)},
{@link org.apache.lucene.search.IndexSearcher#createNormalizedWeight(org.apache.lucene.search.Query,boolean)
IndexSearcher.createNormalizedWeight(Query,boolean)},
{@link org.apache.lucene.search.Filter Filter} and the number of results we want.
This method returns a {@link org.apache.lucene.search.TopDocs TopDocs} object,
which is an internal collection of search results. The IndexSearcher creates

View File

@ -71,7 +71,7 @@ public class PayloadNearQuery extends SpanNearQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new PayloadNearSpanWeight(this, searcher);
}
@ -148,14 +148,14 @@ public class PayloadNearQuery extends SpanNearQuery {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new PayloadNearSpanScorer(query.getSpans(context, acceptDocs, termContexts), this,
similarity, similarity.simScorer(stats, context));
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
PayloadNearSpanScorer scorer = (PayloadNearSpanScorer) scorer(context, context.reader().getLiveDocs(), true);
PayloadNearSpanScorer scorer = (PayloadNearSpanScorer) scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {

View File

@ -67,7 +67,7 @@ public class PayloadTermQuery extends SpanTermQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new PayloadTermWeight(this, searcher);
}
@ -79,7 +79,7 @@ public class PayloadTermQuery extends SpanTermQuery {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new PayloadTermSpanScorer((TermSpans) query.getSpans(context, acceptDocs, termContexts),
this, similarity.simScorer(stats, context));
}
@ -176,7 +176,7 @@ public class PayloadTermQuery extends SpanTermQuery {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
PayloadTermSpanScorer scorer = (PayloadTermSpanScorer) scorer(context, context.reader().getLiveDocs(), true);
PayloadTermSpanScorer scorer = (PayloadTermSpanScorer) scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {

View File

@ -106,8 +106,8 @@ public class FieldMaskingSpanQuery extends SpanQuery {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return maskedQuery.createWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return maskedQuery.createWeight(searcher, needsScores);
}
@Override

View File

@ -42,7 +42,7 @@ public abstract class SpanQuery extends Query {
public abstract String getField();
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new SpanWeight(this, searcher);
}

View File

@ -41,13 +41,14 @@ import org.apache.lucene.util.Bits;
* Expert-only. Public for use by other weight implementations
*/
public class SpanWeight extends Weight {
protected Similarity similarity;
protected Map<Term,TermContext> termContexts;
protected SpanQuery query;
protected final Similarity similarity;
protected final Map<Term,TermContext> termContexts;
protected final SpanQuery query;
protected Similarity.SimWeight stats;
public SpanWeight(SpanQuery query, IndexSearcher searcher)
throws IOException {
super(query);
this.similarity = searcher.getSimilarity();
this.query = query;
@ -71,9 +72,6 @@ public class SpanWeight extends Weight {
}
}
@Override
public Query getQuery() { return query; }
@Override
public float getValueForNormalization() throws IOException {
return stats == null ? 1.0f : stats.getValueForNormalization();
@ -87,7 +85,7 @@ public class SpanWeight extends Weight {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
if (stats == null) {
return null;
} else {
@ -97,7 +95,7 @@ public class SpanWeight extends Weight {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
SpanScorer scorer = (SpanScorer) scorer(context, context.reader().getLiveDocs(), true);
SpanScorer scorer = (SpanScorer) scorer(context, context.reader().getLiveDocs());
if (scorer != null) {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {

View File

@ -287,13 +287,12 @@ final class JustCompileSearch {
static final class JustCompileWeight extends Weight {
@Override
public Explanation explain(LeafReaderContext context, int doc) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
protected JustCompileWeight() {
super(null);
}
@Override
public Query getQuery() {
public Explanation explain(LeafReaderContext context, int doc) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
@ -308,7 +307,7 @@ final class JustCompileSearch {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}

View File

@ -706,15 +706,15 @@ public class TestBooleanCoord extends LuceneTestCase {
/** asserts score for our single matching good doc */
private void assertScore(final float expected, Query query) throws Exception {
// test in-order
Weight weight = searcher.createNormalizedWeight(query);
Scorer scorer = weight.scorer(reader.leaves().get(0), null, true);
Weight weight = searcher.createNormalizedWeight(query, true);
Scorer scorer = weight.scorer(reader.leaves().get(0), null);
assertTrue(scorer.docID() == -1 || scorer.docID() == DocIdSetIterator.NO_MORE_DOCS);
assertEquals(0, scorer.nextDoc());
assertEquals(expected, scorer.score(), 0.0001f);
// test bulk scorer
final AtomicBoolean seen = new AtomicBoolean(false);
BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), null, true);
BulkScorer bulkScorer = weight.bulkScorer(reader.leaves().get(0), null);
assertNotNull(bulkScorer);
bulkScorer.score(new LeafCollector() {
Scorer scorer;

View File

@ -185,10 +185,10 @@ public class TestBooleanOr extends LuceneTestCase {
bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
bq.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
Weight w = s.createNormalizedWeight(bq);
Weight w = s.createNormalizedWeight(bq, true);
assertEquals(1, s.getIndexReader().leaves().size());
BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null, true);
BulkScorer scorer = w.bulkScorer(s.getIndexReader().leaves().get(0), null);
final FixedBitSet hits = new FixedBitSet(docCount);
final AtomicInteger end = new AtomicInteger();

View File

@@ -231,9 +231,9 @@ public class TestBooleanQuery extends LuceneTestCase {
q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
}
Weight weight = s.createNormalizedWeight(q);
Weight weight = s.createNormalizedWeight(q, true);
Scorer scorer = weight.scorer(s.leafContexts.get(0), null, true);
Scorer scorer = weight.scorer(s.leafContexts.get(0), null);
// First pass: just use .nextDoc() to gather all hits
final List<ScoreDoc> hits = new ArrayList<>();
@@ -249,8 +249,8 @@
// verify exact match:
for(int iter2=0;iter2<10;iter2++) {
weight = s.createNormalizedWeight(q);
scorer = weight.scorer(s.leafContexts.get(0), null, true);
weight = s.createNormalizedWeight(q, true);
scorer = weight.scorer(s.leafContexts.get(0), null);
if (VERBOSE) {
System.out.println(" iter2=" + iter2);

View File

@@ -282,11 +282,11 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
static class BooleanQuery2 extends BooleanQuery {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BooleanWeight(searcher, false) {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(searcher, needsScores, false) {
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
Scorer scorer = scorer(context, acceptDocs, needsScores);
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer scorer = scorer(context, acceptDocs);
if (scorer == null) {
return null;
}

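Subclasses that override createWeight() now receive the flag and are expected to forward it, as BooleanQuery2 does above. A condensed sketch of that shape (disableCoord hard-wired to false here purely for illustration):

    BooleanQuery bq = new BooleanQuery() {
      @Override
      public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
        // forward the flag instead of re-deciding it on every scorer() call
        return new BooleanWeight(searcher, needsScores, false);
      }
    };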
View File

@@ -70,18 +70,13 @@ public class TestBooleanScorer extends LuceneTestCase {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new Weight() {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new Weight(CrazyMustUseBulkScorerQuery.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) {
throw new UnsupportedOperationException();
}
@Override
public Query getQuery() {
return CrazyMustUseBulkScorerQuery.this;
}
@Override
public float getValueForNormalization() {
return 1.0f;
@@ -92,12 +87,12 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) {
throw new UnsupportedOperationException();
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) {
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) {
return new BulkScorer() {
@Override
public int score(LeafCollector collector, int min, int max) throws IOException {

View File

@@ -178,9 +178,9 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
QueryUtils.check(random(), dq, s);
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
final Weight dw = s.createNormalizedWeight(dq);
final Weight dw = s.createNormalizedWeight(dq, true);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs(), true);
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs());
final boolean skipOk = ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS;
if (skipOk) {
fail("firsttime skipTo found a match? ... "
@@ -194,9 +194,9 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
dq.add(tq("dek", "DOES_NOT_EXIST"));
assertTrue(s.getTopReaderContext() instanceof LeafReaderContext);
QueryUtils.check(random(), dq, s);
final Weight dw = s.createNormalizedWeight(dq);
final Weight dw = s.createNormalizedWeight(dq, true);
LeafReaderContext context = (LeafReaderContext)s.getTopReaderContext();
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs(), true);
final Scorer ds = dw.scorer(context, context.reader().getLiveDocs());
assertTrue("firsttime skipTo found no match",
ds.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
assertEquals("found wrong docid", "d4", r.document(ds.docID()).get("id"));

View File

@@ -125,17 +125,17 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
}
bq.setMinimumNumberShouldMatch(minShouldMatch);
BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq);
BooleanWeight weight = (BooleanWeight) searcher.createNormalizedWeight(bq, true);
switch (mode) {
case DOC_VALUES:
return new SlowMinShouldMatchScorer(weight, reader, searcher);
case SCORER:
return weight.scorer(reader.getContext(), null, true);
return weight.scorer(reader.getContext(), null);
case BULK_SCORER:
final BulkScorer bulkScorer = weight.booleanScorer(reader.getContext(), null, true);
final BulkScorer bulkScorer = weight.booleanScorer(reader.getContext(), null);
if (bulkScorer == null) {
if (weight.scorer(reader.getContext(), null, true) != null) {
if (weight.scorer(reader.getContext(), null) != null) {
throw new AssertionError("BooleanScorer should be applicable for this query");
}
return null;

View File

@@ -344,7 +344,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
MultiPhraseQuery query = new MultiPhraseQuery();
query.add(new Term[] { new Term("body", "this"), new Term("body", "that") });
query.add(new Term("body", "is"));
Weight weight = query.createWeight(searcher);
Weight weight = query.createWeight(searcher, true);
assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f);
writer.close();

View File

@@ -109,19 +109,14 @@ public class TestNeedsScores extends LuceneTestCase {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
final Weight w = in.createWeight(searcher);
return new Weight() {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight w = in.createWeight(searcher, needsScores);
return new Weight(AssertNeedsScores.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return w.explain(context, doc);
}
@Override
public Query getQuery() {
return AssertNeedsScores.this;
}
@Override
public float getValueForNormalization() throws IOException {
return w.getValueForNormalization();
@@ -133,9 +128,9 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
assertEquals("query=" + in, value, needsScores);
return w.scorer(context, acceptDocs, needsScores);
return w.scorer(context, acceptDocs);
}
};
}

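The wrapper above is the standard delegation shape after this change: pass the outer query to super(), forward every method to the inner Weight, and intercept only what you need. A generic sketch with illustrative names:

    class DelegatingWeight extends Weight {
      private final Weight in;
      DelegatingWeight(Query outer, Weight in) {
        super(outer); // the outer query, not in.getQuery()
        this.in = in;
      }
      @Override
      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
        return in.explain(context, doc);
      }
      @Override
      public float getValueForNormalization() throws IOException {
        return in.getValueForNormalization();
      }
      @Override
      public void normalize(float norm, float topLevelBoost) {
        in.normalize(norm, topLevelBoost);
      }
      @Override
      public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
        return in.scorer(context, acceptDocs); // intercept here to assert or instrument
      }
    }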
View File

@@ -107,7 +107,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
Scorer s = new SimpleScorer(fake);
TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length);
Collector c = new PositiveScoresOnlyCollector(tdc);

View File

@@ -425,14 +425,9 @@ public class TestQueryRescorer extends LuceneTestCase {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new Weight() {
@Override
public Query getQuery() {
return FixedScoreQuery.this;
}
return new Weight(FixedScoreQuery.this) {
@Override
public float getValueForNormalization() {
@@ -444,7 +439,7 @@
}
@Override
public Scorer scorer(final LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(final LeafReaderContext context, Bits acceptDocs) throws IOException {
return new Scorer(null) {
int docID = -1;

View File

@@ -130,7 +130,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher);
Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
Scorer s = new SimpleScorer(fake);
ScoreCachingCollector scc = new ScoreCachingCollector(scores.length);
scc.setScorer(s);

View File

@@ -75,10 +75,10 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext)indexSearcher.getTopReaderContext();
BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs(), true);
BulkScorer ts = weight.bulkScorer(context, context.reader().getLiveDocs());
// we have 2 documents with the term all in them, one document for all the
// other values
final List<TestHit> docs = new ArrayList<>();
@@ -137,10 +137,10 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
Scorer ts = weight.scorer(context, context.reader().getLiveDocs(), true);
Scorer ts = weight.scorer(context, context.reader().getLiveDocs());
assertTrue("next did not return a doc",
ts.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
assertTrue("score is not correct", ts.score() == 1.6931472f);
@@ -156,10 +156,10 @@ public class TestTermScorer extends LuceneTestCase {
Term allTerm = new Term(FIELD, "all");
TermQuery termQuery = new TermQuery(allTerm);
Weight weight = indexSearcher.createNormalizedWeight(termQuery);
Weight weight = indexSearcher.createNormalizedWeight(termQuery, true);
assertTrue(indexSearcher.getTopReaderContext() instanceof LeafReaderContext);
LeafReaderContext context = (LeafReaderContext) indexSearcher.getTopReaderContext();
Scorer ts = weight.scorer(context, context.reader().getLiveDocs(), true);
Scorer ts = weight.scorer(context, context.reader().getLiveDocs());
assertTrue("Didn't skip", ts.advance(3) != DocIdSetIterator.NO_MORE_DOCS);
// The next doc should be doc 5
assertTrue("doc should be number 5", ts.docID() == 5);

View File

@@ -20,8 +20,6 @@ package org.apache.lucene.search;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatDocValuesField;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.LeafReaderContext;
@@ -56,8 +54,9 @@ public class TestTopDocsMerge extends LuceneTestCase {
}
public TopDocs search(Weight weight, int topN) throws IOException {
return search(ctx, weight, null, topN);
}
TopScoreDocCollector collector = TopScoreDocCollector.create(topN);
search(ctx, weight, collector);
return collector.topDocs();
}
@Override
public String toString() {
@@ -252,9 +251,14 @@
}
// ... then all shards:
final Weight w = searcher.createNormalizedWeight(query);
final Weight w = searcher.createNormalizedWeight(query, true);
final TopDocs[] shardHits = new TopDocs[subSearchers.length];
final TopDocs[] shardHits;
if (sort == null) {
shardHits = new TopDocs[subSearchers.length];
} else {
shardHits = new TopFieldDocs[subSearchers.length];
}
for(int shardIDX=0;shardIDX<subSearchers.length;shardIDX++) {
final TopDocs subHits;
final ShardSearcher subSearcher = subSearchers[shardIDX];
@@ -280,9 +284,17 @@
// Merge:
final TopDocs mergedHits;
if (useFrom) {
mergedHits = TopDocs.merge(sort, from, size, shardHits);
if (sort == null) {
mergedHits = TopDocs.merge(from, size, shardHits);
} else {
mergedHits = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardHits);
}
} else {
mergedHits = TopDocs.merge(sort, numHits, shardHits);
if (sort == null) {
mergedHits = TopDocs.merge(numHits, shardHits);
} else {
mergedHits = TopDocs.merge(sort, numHits, (TopFieldDocs[]) shardHits);
}
}
if (mergedHits.scoreDocs != null) {

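The branching above reflects the split of TopDocs.merge() into two overloads: an unsorted one taking plain TopDocs[] and a sorted one requiring TopFieldDocs[]. A hedged sketch of the resulting call-site pattern:

    final TopDocs merged;
    if (sort == null) {
      merged = TopDocs.merge(numHits, shardHits);                        // shardHits is TopDocs[]
    } else {
      merged = TopDocs.merge(sort, numHits, (TopFieldDocs[]) shardHits); // array must really be a TopFieldDocs[]
    }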
View File

@@ -181,10 +181,10 @@
*/
public void testSpanNearScorerSkipTo1() throws Exception {
SpanNearQuery q = makeQuery();
Weight w = searcher.createNormalizedWeight(q);
Weight w = searcher.createNormalizedWeight(q, true);
IndexReaderContext topReaderContext = searcher.getTopReaderContext();
LeafReaderContext leave = topReaderContext.leaves().get(0);
Scorer s = w.scorer(leave, leave.reader().getLiveDocs(), true);
Scorer s = w.scorer(leave, leave.reader().getLiveDocs());
assertEquals(1, s.advance(1));
}

View File

@@ -430,7 +430,7 @@ public class TestSpans extends LuceneTestCase {
slop,
ordered);
spanScorer = searcher.createNormalizedWeight(snq).scorer(ctx, ctx.reader().getLiveDocs(), true);
spanScorer = searcher.createNormalizedWeight(snq, true).scorer(ctx, ctx.reader().getLiveDocs());
} finally {
searcher.setSimilarity(oldSim);
}

View File

@@ -73,8 +73,8 @@ class DrillSidewaysQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
final Weight baseWeight = baseQuery.createWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight baseWeight = baseQuery.createWeight(searcher, needsScores);
final Object[] drillDowns = new Object[drillDownQueries.length];
for(int dim=0;dim<drillDownQueries.length;dim++) {
Query query = drillDownQueries[dim];
@@ -84,21 +84,16 @@ class DrillSidewaysQuery extends Query {
} else {
// TODO: would be nice if we could say "we will do no
// scoring" here....
drillDowns[dim] = searcher.rewrite(query).createWeight(searcher);
drillDowns[dim] = searcher.rewrite(query).createWeight(searcher, needsScores);
}
}
return new Weight() {
return new Weight(DrillSidewaysQuery.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return baseWeight.explain(context, doc);
}
@Override
public Query getQuery() {
return baseQuery;
}
@Override
public float getValueForNormalization() throws IOException {
return baseWeight.getValueForNormalization();
@@ -110,17 +105,17 @@ class DrillSidewaysQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
// We can only run as a top scorer:
throw new UnsupportedOperationException();
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
// TODO: it could be better if we take acceptDocs
// into account instead of baseScorer?
Scorer baseScorer = baseWeight.scorer(context, acceptDocs, needsScores);
Scorer baseScorer = baseWeight.scorer(context, acceptDocs);
DrillSidewaysScorer.DocsAndCost[] dims = new DrillSidewaysScorer.DocsAndCost[drillDowns.length];
int nullCount = 0;
@@ -165,7 +160,7 @@ class DrillSidewaysQuery extends Query {
dims[dim].disi = disi;
}
} else {
DocIdSetIterator disi = ((Weight) drillDowns[dim]).scorer(context, null, needsScores);
DocIdSetIterator disi = ((Weight) drillDowns[dim]).scorer(context, null);
if (disi == null) {
nullCount++;
continue;

View File

@@ -21,6 +21,7 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import java.io.IOException;
@@ -130,7 +131,12 @@ public class TopGroups<GROUP_VALUE_TYPE> {
@SuppressWarnings({"unchecked","rawtypes"})
final GroupDocs<T>[] mergedGroupDocs = new GroupDocs[numGroups];
final TopDocs[] shardTopDocs = new TopDocs[shardGroups.length];
final TopDocs[] shardTopDocs;
if (docSort == null) {
shardTopDocs = new TopDocs[shardGroups.length];
} else {
shardTopDocs = new TopFieldDocs[shardGroups.length];
}
float totalMaxScore = Float.MIN_VALUE;
for(int groupIDX=0;groupIDX<numGroups;groupIDX++) {
@@ -157,15 +163,27 @@
}
*/
shardTopDocs[shardIDX] = new TopDocs(shardGroupDocs.totalHits,
shardGroupDocs.scoreDocs,
shardGroupDocs.maxScore);
if (docSort == null) {
shardTopDocs[shardIDX] = new TopDocs(shardGroupDocs.totalHits,
shardGroupDocs.scoreDocs,
shardGroupDocs.maxScore);
} else {
shardTopDocs[shardIDX] = new TopFieldDocs(shardGroupDocs.totalHits,
shardGroupDocs.scoreDocs,
docSort.getSort(),
shardGroupDocs.maxScore);
}
maxScore = Math.max(maxScore, shardGroupDocs.maxScore);
totalHits += shardGroupDocs.totalHits;
scoreSum += shardGroupDocs.score;
}
final TopDocs mergedTopDocs = TopDocs.merge(docSort, docOffset + docTopN, shardTopDocs);
final TopDocs mergedTopDocs;
if (docSort == null) {
mergedTopDocs = TopDocs.merge(docOffset + docTopN, shardTopDocs);
} else {
mergedTopDocs = TopDocs.merge(docSort, docOffset + docTopN, (TopFieldDocs[]) shardTopDocs);
}
// Slice;
final ScoreDoc[] mergedScoreDocs;

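Why shardTopDocs is allocated as TopFieldDocs[] when docSort is set: a Java array cast checks the array's runtime type, so the later (TopFieldDocs[]) cast would throw ClassCastException if the array had been created as new TopDocs[n]. A two-line illustration:

    TopDocs[] shards = new TopFieldDocs[2];         // runtime type is TopFieldDocs[]
    TopFieldDocs[] typed = (TopFieldDocs[]) shards; // OK; fails if allocated as new TopDocs[2]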
View File

@@ -1175,7 +1175,7 @@ public class TestGrouping extends LuceneTestCase {
System.out.println("TEST: " + subSearchers.length + " shards: " + Arrays.toString(subSearchers) + " canUseIDV=" + canUseIDV);
}
// Run 1st pass collector to get top groups per shard
final Weight w = topSearcher.createNormalizedWeight(query);
final Weight w = topSearcher.createNormalizedWeight(query, true);
final List<Collection<SearchGroup<BytesRef>>> shardGroups = new ArrayList<>();
List<AbstractFirstPassGroupingCollector<?>> firstPassGroupingCollectors = new ArrayList<>();
AbstractFirstPassGroupingCollector<?> firstPassCollector = null;

View File

@@ -602,7 +602,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
BooleanQuery booleanQuery = new BooleanQuery();
booleanQuery.add(new ToChildBlockJoinQuery(new TermQuery(
new Term(FIELD_NAME, "parent")), parentFilter, false), Occur.MUST);
new Term(FIELD_NAME, "parent")), parentFilter), Occur.MUST);
booleanQuery.add(new TermQuery(new Term(FIELD_NAME, "child")), Occur.MUST);
query = booleanQuery;

View File

@@ -123,9 +123,9 @@ class TermsIncludingScoreQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
final Weight originalWeight = originalQuery.createWeight(searcher);
return new Weight() {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
return new Weight(TermsIncludingScoreQuery.this) {
private TermsEnum segmentTermsEnum;
@@ -149,11 +149,6 @@
return new ComplexExplanation(false, 0.0f, "Not a match");
}
@Override
public Query getQuery() {
return TermsIncludingScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
return originalWeight.getValueForNormalization() * TermsIncludingScoreQuery.this.getBoost() * TermsIncludingScoreQuery.this.getBoost();
@@ -165,7 +160,7 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Terms terms = context.reader().terms(field);
if (terms == null) {
return null;

View File

@@ -61,34 +61,30 @@ public class ToChildBlockJoinQuery extends Query {
// original, so that user does not have to .rewrite() their
// query before searching:
private final Query origParentQuery;
private final boolean doScores;
/**
* Create a ToChildBlockJoinQuery.
*
* @param parentQuery Query that matches parent documents
* @param parentsFilter Filter identifying the parent documents.
* @param doScores true if parent scores should be calculated
*/
public ToChildBlockJoinQuery(Query parentQuery, BitDocIdSetFilter parentsFilter, boolean doScores) {
public ToChildBlockJoinQuery(Query parentQuery, BitDocIdSetFilter parentsFilter) {
super();
this.origParentQuery = parentQuery;
this.parentQuery = parentQuery;
this.parentsFilter = parentsFilter;
this.doScores = doScores;
}
private ToChildBlockJoinQuery(Query origParentQuery, Query parentQuery, BitDocIdSetFilter parentsFilter, boolean doScores) {
private ToChildBlockJoinQuery(Query origParentQuery, Query parentQuery, BitDocIdSetFilter parentsFilter) {
super();
this.origParentQuery = origParentQuery;
this.parentQuery = parentQuery;
this.parentsFilter = parentsFilter;
this.doScores = doScores;
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher), parentsFilter, doScores);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores), parentsFilter, needsScores);
}
/** Return our parent query. */
@@ -103,18 +99,13 @@ public class ToChildBlockJoinQuery extends Query {
private final boolean doScores;
public ToChildBlockJoinWeight(Query joinQuery, Weight parentWeight, BitDocIdSetFilter parentsFilter, boolean doScores) {
super();
super(joinQuery);
this.joinQuery = joinQuery;
this.parentWeight = parentWeight;
this.parentsFilter = parentsFilter;
this.doScores = doScores;
}
@Override
public Query getQuery() {
return joinQuery;
}
@Override
public float getValueForNormalization() throws IOException {
return parentWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost();
@@ -128,9 +119,9 @@ public class ToChildBlockJoinQuery extends Query {
// NOTE: acceptDocs applies (and is checked) only in the
// child document space
@Override
public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs) throws IOException {
final Scorer parentScorer = parentWeight.scorer(readerContext, null, needsScores);
final Scorer parentScorer = parentWeight.scorer(readerContext, null);
if (parentScorer == null) {
// No matches
@@ -354,8 +345,7 @@ public class ToChildBlockJoinQuery extends Query {
if (parentRewrite != parentQuery) {
Query rewritten = new ToChildBlockJoinQuery(parentQuery,
parentRewrite,
parentsFilter,
doScores);
parentsFilter);
rewritten.setBoost(getBoost());
return rewritten;
} else {
@@ -374,7 +364,6 @@ public class ToChildBlockJoinQuery extends Query {
final ToChildBlockJoinQuery other = (ToChildBlockJoinQuery) _other;
return origParentQuery.equals(other.origParentQuery) &&
parentsFilter.equals(other.parentsFilter) &&
doScores == other.doScores &&
super.equals(other);
} else {
return false;
@@ -386,7 +375,6 @@ public class ToChildBlockJoinQuery extends Query {
final int prime = 31;
int hash = super.hashCode();
hash = prime * hash + origParentQuery.hashCode();
hash = prime * hash + new Boolean(doScores).hashCode();
hash = prime * hash + parentsFilter.hashCode();
return hash;
}
@@ -394,7 +382,6 @@ public class ToChildBlockJoinQuery extends Query {
@Override
public ToChildBlockJoinQuery clone() {
return new ToChildBlockJoinQuery(origParentQuery.clone(),
parentsFilter,
doScores);
parentsFilter);
}
}

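Callers of ToChildBlockJoinQuery no longer choose score computation at construction time: the doScores flag is gone, and scoring now follows whatever needsScores value reaches createWeight(). A hedged before/after sketch, with parentQuery and parentsFilter assumed from context:

    // before: new ToChildBlockJoinQuery(parentQuery, parentsFilter, doScores)
    ToChildBlockJoinQuery q = new ToChildBlockJoinQuery(parentQuery, parentsFilter);
    TopDocs hits = searcher.search(q, 10); // the collector now reports whether scores are needed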
View File

@@ -56,7 +56,7 @@ public class ToParentBlockJoinIndexSearcher extends IndexSearcher {
// we force the use of Scorer (not BulkScorer) to make sure
// that the scorer passed to LeafCollector.setScorer supports
// Scorer.getChildren
Scorer scorer = weight.scorer(ctx, ctx.reader().getLiveDocs(), true);
Scorer scorer = weight.scorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
final LeafCollector leafCollector = collector.getLeafCollector(ctx);
leafCollector.setScorer(scorer);

View File

@@ -120,8 +120,8 @@ public class ToParentBlockJoinQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BlockJoinWeight(this, childQuery.createWeight(searcher), parentsFilter, scoreMode);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores), parentsFilter, scoreMode);
}
/** Return our child query. */
@@ -136,18 +136,13 @@ public class ToParentBlockJoinQuery extends Query {
private final ScoreMode scoreMode;
public BlockJoinWeight(Query joinQuery, Weight childWeight, BitDocIdSetFilter parentsFilter, ScoreMode scoreMode) {
super();
super(joinQuery);
this.joinQuery = joinQuery;
this.childWeight = childWeight;
this.parentsFilter = parentsFilter;
this.scoreMode = scoreMode;
}
@Override
public Query getQuery() {
return joinQuery;
}
@Override
public float getValueForNormalization() throws IOException {
return childWeight.getValueForNormalization() * joinQuery.getBoost() * joinQuery.getBoost();
@@ -161,9 +156,9 @@ public class ToParentBlockJoinQuery extends Query {
// NOTE: acceptDocs applies (and is checked) only in the
// parent document space
@Override
public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext readerContext, Bits acceptDocs) throws IOException {
final Scorer childScorer = childWeight.scorer(readerContext, readerContext.reader().getLiveDocs(), needsScores);
final Scorer childScorer = childWeight.scorer(readerContext, readerContext.reader().getLiveDocs());
if (childScorer == null) {
// No matches
return null;
@@ -189,7 +184,7 @@ public class ToParentBlockJoinQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, context.reader().getLiveDocs(), true);
BlockJoinScorer scorer = (BlockJoinScorer) scorer(context, context.reader().getLiveDocs());
if (scorer != null && scorer.advance(doc) == doc) {
return scorer.explain(context.docBase);
}

View File

@@ -231,7 +231,7 @@ public class TestBlockJoin extends LuceneTestCase {
//System.out.println("TEST: now test up");
// Now join "up" (map parent hits to child docs) instead...:
ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter, random().nextBoolean());
ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter);
BooleanQuery fullChildQuery = new BooleanQuery();
fullChildQuery.add(new BooleanClause(parentJoinQuery, Occur.MUST));
fullChildQuery.add(new BooleanClause(childQuery, Occur.MUST));
@@ -375,15 +375,15 @@ public class TestBlockJoin extends LuceneTestCase {
TermQuery us = new TermQuery(new Term("country", "United States"));
assertEquals("@ US we have java and ruby", 2,
s.search(new ToChildBlockJoinQuery(us,
parentsFilter, random().nextBoolean()), 10).totalHits );
parentsFilter), 10).totalHits );
assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter, random().nextBoolean()),
assertEquals("java skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter),
skill("java"), 10).totalHits );
BooleanQuery rubyPython = new BooleanQuery();
rubyPython.add(new TermQuery(new Term("skill", "ruby")), Occur.SHOULD);
rubyPython.add(new TermQuery(new Term("skill", "python")), Occur.SHOULD);
assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter, random().nextBoolean()),
assertEquals("ruby skills in US", 1, s.search(new ToChildBlockJoinQuery(us, parentsFilter),
new QueryWrapperFilter(rubyPython), 10).totalHits );
r.close();
@@ -919,7 +919,7 @@ public class TestBlockJoin extends LuceneTestCase {
}
// Maps parent query to child docs:
final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter, random().nextBoolean());
final ToChildBlockJoinQuery parentJoinQuery2 = new ToChildBlockJoinQuery(parentQuery2, parentsFilter);
// To run against the block-join index:
final Query childJoinQuery2;
@@ -1188,8 +1188,8 @@ public class TestBlockJoin extends LuceneTestCase {
new TermQuery(new Term("parent", "1"))));
ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
Weight weight = s.createNormalizedWeight(q);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null, true);
Weight weight = s.createNormalizedWeight(q, true);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null);
assertEquals(1, disi.advance(1));
r.close();
dir.close();
@@ -1222,8 +1222,8 @@ public class TestBlockJoin extends LuceneTestCase {
new TermQuery(new Term("isparent", "yes"))));
ToParentBlockJoinQuery q = new ToParentBlockJoinQuery(tq, parentFilter, ScoreMode.Avg);
Weight weight = s.createNormalizedWeight(q);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null, true);
Weight weight = s.createNormalizedWeight(q, true);
DocIdSetIterator disi = weight.scorer(s.getIndexReader().leaves().get(0), null);
assertEquals(2, disi.advance(0));
r.close();
dir.close();
@@ -1548,7 +1548,7 @@ public class TestBlockJoin extends LuceneTestCase {
Query parentQuery = new TermQuery(new Term("parent", "2"));
ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter, random().nextBoolean());
ToChildBlockJoinQuery parentJoinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter);
TopDocs topdocs = s.search(parentJoinQuery, 3);
assertEquals(1, topdocs.totalHits);

View File

@@ -107,7 +107,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
public void testNextDocValidationForToChildBjq() throws Exception {
Query parentQueryWithRandomChild = createParentsQueryWithOneChild(getRandomChildNumber(0));
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false);
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter);
thrown.expect(IllegalStateException.class);
thrown.expectMessage(ToChildBlockJoinQuery.INVALID_QUERY_MESSAGE);
indexSearcher.search(blockJoinQuery, 1);
@@ -117,7 +117,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
public void testValidationForToChildBjqWithChildFilterQuery() throws Exception {
Query parentQueryWithRandomChild = createParentQuery();
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false);
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter);
Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("common_field", "1")));
thrown.expect(IllegalStateException.class);
thrown.expectMessage(ToChildBlockJoinQuery.ILLEGAL_ADVANCE_ON_PARENT);
@@ -131,7 +131,7 @@ public class TestBlockJoinValidation extends LuceneTestCase {
// in BJQ must be greater than child number in Boolean clause
int nextRandomChildNumber = getRandomChildNumber(randomChildNumber);
Query parentQueryWithRandomChild = createParentsQueryWithOneChild(nextRandomChildNumber);
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter, false);
ToChildBlockJoinQuery blockJoinQuery = new ToChildBlockJoinQuery(parentQueryWithRandomChild, parentsFilter);
// advance() method is used by ConjunctionScorer, so we need to create Boolean conjunction query
BooleanQuery conjunctionQuery = new BooleanQuery();
WildcardQuery childQuery = new WildcardQuery(new Term("child", createFieldValue(randomChildNumber)));

View File

@@ -54,8 +54,8 @@ public class BoostingQuery extends Query {
public Query rewrite(IndexReader reader) throws IOException {
BooleanQuery result = new BooleanQuery() {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BooleanWeight(searcher, false) {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BooleanWeight(searcher, needsScores, false) {
@Override
public float coord(int overlap, int max) {

View File

@@ -187,21 +187,16 @@ public class CustomScoreQuery extends Query {
boolean qStrict;
float queryWeight;
public CustomWeight(IndexSearcher searcher) throws IOException {
this.subQueryWeight = subQuery.createWeight(searcher);
public CustomWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
super(CustomScoreQuery.this);
this.subQueryWeight = subQuery.createWeight(searcher, needsScores);
this.valSrcWeights = new Weight[scoringQueries.length];
for(int i = 0; i < scoringQueries.length; i++) {
this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher);
this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores);
}
this.qStrict = strict;
}
/*(non-Javadoc) @see org.apache.lucene.search.Weight#getQuery() */
@Override
public Query getQuery() {
return CustomScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = subQueryWeight.getValueForNormalization();
@@ -235,14 +230,14 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context, acceptDocs, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context, acceptDocs);
if (subQueryScorer == null) {
return null;
}
Scorer[] valSrcScorers = new Scorer[valSrcWeights.length];
for(int i = 0; i < valSrcScorers.length; i++) {
valSrcScorers[i] = valSrcWeights[i].scorer(context, acceptDocs, needsScores);
valSrcScorers[i] = valSrcWeights[i].scorer(context, acceptDocs);
}
return new CustomScorer(CustomScoreQuery.this.getCustomScoreProvider(context), this, queryWeight, subQueryScorer, valSrcScorers);
}
@@ -373,8 +368,8 @@
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new CustomWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new CustomWeight(searcher, needsScores);
}
/**

View File

@@ -68,8 +68,8 @@ public class BoostedQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return new BoostedQuery.BoostedWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new BoostedQuery.BoostedWeight(searcher, needsScores);
}
private class BoostedWeight extends Weight {
@@ -77,18 +77,14 @@ public class BoostedQuery extends Query {
Weight qWeight;
Map fcontext;
public BoostedWeight(IndexSearcher searcher) throws IOException {
public BoostedWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
super(BoostedQuery.this);
this.searcher = searcher;
this.qWeight = q.createWeight(searcher);
this.qWeight = q.createWeight(searcher, needsScores);
this.fcontext = ValueSource.newContext(searcher);
boostVal.createWeight(fcontext,searcher);
}
@Override
public Query getQuery() {
return BoostedQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
float sum = qWeight.getValueForNormalization();
@@ -103,8 +99,8 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
Scorer subQueryScorer = qWeight.scorer(context, acceptDocs, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer subQueryScorer = qWeight.scorer(context, acceptDocs);
if (subQueryScorer == null) {
return null;
}

View File

@@ -72,16 +72,12 @@ public class FunctionQuery extends Query {
protected final Map context;
public FunctionWeight(IndexSearcher searcher) throws IOException {
super(FunctionQuery.this);
this.searcher = searcher;
this.context = ValueSource.newContext(searcher);
func.createWeight(context, searcher);
}
@Override
public Query getQuery() {
return FunctionQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
@@ -95,13 +91,13 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new AllScorer(context, acceptDocs, this, queryWeight);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return ((AllScorer)scorer(context, context.reader().getLiveDocs(), true)).explain(doc);
return ((AllScorer)scorer(context, context.reader().getLiveDocs())).explain(doc);
}
}
@@ -208,7 +204,7 @@ public class FunctionQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new FunctionQuery.FunctionWeight(searcher);
}

View File

@@ -72,7 +72,7 @@ public class QueryValueSource extends ValueSource {
@Override
public void createWeight(Map context, IndexSearcher searcher) throws IOException {
Weight w = searcher.createNormalizedWeight(q);
Weight w = searcher.createNormalizedWeight(q, true);
context.put(this, w);
}
}
@@ -126,7 +126,7 @@ class QueryDocValues extends FloatDocValues {
try {
if (doc < lastDocRequested) {
if (noMatches) return defVal;
scorer = weight.scorer(readerContext, acceptDocs, true);
scorer = weight.scorer(readerContext, acceptDocs);
if (scorer==null) {
noMatches = true;
return defVal;
@@ -157,7 +157,7 @@
try {
if (doc < lastDocRequested) {
if (noMatches) return false;
scorer = weight.scorer(readerContext, acceptDocs, true);
scorer = weight.scorer(readerContext, acceptDocs);
scorerDoc = -1;
if (scorer==null) {
noMatches = true;
@@ -215,7 +215,7 @@
mval.exists = false;
return;
}
scorer = weight.scorer(readerContext, acceptDocs, true);
scorer = weight.scorer(readerContext, acceptDocs);
scorerDoc = -1;
if (scorer==null) {
noMatches = true;

View File

@@ -188,7 +188,7 @@ public class TermAutomatonQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
IndexReaderContext context = searcher.getTopReaderContext();
Map<Integer,TermContext> termStates = new HashMap<>();
@@ -347,6 +347,7 @@
private final Similarity similarity;
public TermAutomatonWeight(Automaton automaton, IndexSearcher searcher, Map<Integer,TermContext> termStates) throws IOException {
super(TermAutomatonQuery.this);
this.automaton = automaton;
this.searcher = searcher;
this.termStates = termStates;
@@ -369,11 +370,6 @@
return "weight(" + TermAutomatonQuery.this + ")";
}
@Override
public Query getQuery() {
return TermAutomatonQuery.this;
}
@Override
public float getValueForNormalization() {
return stats.getValueForNormalization();
@@ -385,7 +381,7 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
// Initialize the enums; null for a given slot means that term didn't appear in this reader
EnumAndScorer[] enums = new EnumAndScorer[idToTerm.size()];

View File

@@ -99,13 +99,13 @@ public class TestSlowCollationMethods extends LuceneTestCase {
});
final Sort sort = new Sort(sf);
final TopDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random().nextInt(4)), sort);
final TopFieldDocs docs1 = searcher.search(TermRangeQuery.newStringRange("field", null, splitDoc, true, true), null, numDocs/(1+random().nextInt(4)), sort);
doCheckSorting(docs1);
final TopDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random().nextInt(4)), sort);
final TopFieldDocs docs2 = searcher.search(TermRangeQuery.newStringRange("field", splitDoc, null, true, true), null, numDocs/(1+random().nextInt(4)), sort);
doCheckSorting(docs2);
final TopDocs docs = TopDocs.merge(sort, numDocs/(1+random().nextInt(4)), new TopDocs[]{docs1, docs2});
final TopFieldDocs docs = TopDocs.merge(sort, numDocs/(1+random().nextInt(4)), new TopFieldDocs[]{docs1, docs2});
doCheckSorting(docs);
}

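With the sorted merge overload now typed against TopFieldDocs[], sorted searches flow through without casts, since IndexSearcher.search(Query, Filter, int, Sort) already returns TopFieldDocs. A usage sketch (queryA and queryB are assumed queries):

    TopFieldDocs a = searcher.search(queryA, null, 10, sort);
    TopFieldDocs b = searcher.search(queryB, null, 10, sort);
    TopDocs merged = TopDocs.merge(sort, 10, new TopFieldDocs[] { a, b });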
View File

@@ -56,8 +56,8 @@ public class AssertingIndexSearcher extends IndexSearcher {
/** Ensures, that the returned {@code Weight} is not normalized again, which may produce wrong scores. */
@Override
public Weight createNormalizedWeight(Query query) throws IOException {
final Weight w = super.createNormalizedWeight(query);
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
final Weight w = super.createNormalizedWeight(query, needsScores);
return new AssertingWeight(random, w) {
@Override
@@ -66,8 +66,8 @@ public class AssertingIndexSearcher extends IndexSearcher {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
Scorer scorer = w.scorer(context, acceptDocs, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
Scorer scorer = w.scorer(context, acceptDocs);
if (scorer != null) {
// check that scorer obeys disi contract for docID() before next()/advance
try {

View File

@@ -42,8 +42,8 @@ public class AssertingQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher));
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return AssertingWeight.wrap(new Random(random.nextLong()), in.createWeight(searcher, needsScores));
}
@Override

View File

@@ -33,6 +33,7 @@ class AssertingWeight extends Weight {
final Weight in;
AssertingWeight(Random random, Weight in) {
super(in.getQuery());
this.random = random;
this.in = in;
}
@@ -42,11 +43,6 @@ class AssertingWeight extends Weight {
return in.explain(context, doc);
}
@Override
public Query getQuery() {
return in.getQuery();
}
@Override
public float getValueForNormalization() throws IOException {
return in.getValueForNormalization();
@@ -58,15 +54,15 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
final Scorer inScorer = in.scorer(context, acceptDocs, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
final Scorer inScorer = in.scorer(context, acceptDocs);
assert inScorer == null || inScorer.docID() == -1;
return AssertingScorer.wrap(new Random(random.nextLong()), inScorer);
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
BulkScorer inScorer = in.bulkScorer(context, acceptDocs, needsScores);
public BulkScorer bulkScorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
BulkScorer inScorer = in.bulkScorer(context, acceptDocs);
if (inScorer == null) {
return null;
}

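The docID() == -1 assertion above encodes the iterator contract this test framework enforces: a freshly built Scorer is unpositioned until the first nextDoc()/advance(). In sketch form:

    Scorer s = w.scorer(context, acceptDocs);
    assert s == null || s.docID() == -1; // must not be positioned before iteration starts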
View File

@@ -261,9 +261,9 @@ public class QueryUtils {
lastDoc[0] = doc;
try {
if (scorer == null) {
Weight w = s.createNormalizedWeight(q);
Weight w = s.createNormalizedWeight(q, true);
LeafReaderContext context = readerContextArray.get(leafPtr);
scorer = w.scorer(context, context.reader().getLiveDocs(), true);
scorer = w.scorer(context, context.reader().getLiveDocs());
}
int op = order[(opidx[0]++) % order.length];
@@ -313,9 +313,9 @@
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q);
Weight w = indexSearcher.createNormalizedWeight(q, true);
LeafReaderContext ctx = (LeafReaderContext)indexSearcher.getTopReaderContext();
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs(), true);
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -335,9 +335,9 @@
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader, false);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q);
Weight w = indexSearcher.createNormalizedWeight(q, true);
LeafReaderContext ctx = previousReader.getContext();
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs(), true);
Scorer scorer = w.scorer(ctx, ctx.reader().getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -367,8 +367,8 @@
try {
long startMS = System.currentTimeMillis();
for (int i=lastDoc[0]+1; i<=doc; i++) {
Weight w = s.createNormalizedWeight(q);
Scorer scorer = w.scorer(context.get(leafPtr), liveDocs, true);
Weight w = s.createNormalizedWeight(q, true);
Scorer scorer = w.scorer(context.get(leafPtr), liveDocs);
Assert.assertTrue("query collected "+doc+" but skipTo("+i+") says no more docs!",scorer.advance(i) != DocIdSetIterator.NO_MORE_DOCS);
Assert.assertEquals("query collected "+doc+" but skipTo("+i+") got to "+scorer.docID(),doc,scorer.docID());
float skipToScore = scorer.score();
@@ -400,8 +400,8 @@
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs(), true);
Weight w = indexSearcher.createNormalizedWeight(q, true);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -421,8 +421,8 @@
final LeafReader previousReader = lastReader[0];
IndexSearcher indexSearcher = LuceneTestCase.newSearcher(previousReader);
indexSearcher.setSimilarity(s.getSimilarity());
Weight w = indexSearcher.createNormalizedWeight(q);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs(), true);
Weight w = indexSearcher.createNormalizedWeight(q, true);
Scorer scorer = w.scorer((LeafReaderContext)indexSearcher.getTopReaderContext(), previousReader.getLiveDocs());
if (scorer != null) {
boolean more = scorer.advance(lastDoc[0] + 1) != DocIdSetIterator.NO_MORE_DOCS;
Assert.assertFalse("query's last doc was "+ lastDoc[0] +" but skipTo("+(lastDoc[0]+1)+") got to "+scorer.docID(),more);
@@ -432,10 +432,10 @@
/** Check that the scorer and bulk scorer advance consistently. */
public static void checkBulkScorerSkipTo(Random r, Query query, IndexSearcher searcher) throws IOException {
Weight weight = searcher.createNormalizedWeight(query);
Weight weight = searcher.createNormalizedWeight(query, true);
for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs(), true);
final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs(), true);
final Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
final BulkScorer bulkScorer = weight.bulkScorer(context, context.reader().getLiveDocs());
if (scorer == null && bulkScorer == null) {
continue;
}

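All of the QueryUtils checks above share one post-change shape: build a normalized Weight once with needsScores=true, then pull a fresh per-leaf Scorer wherever the advance/skipTo contract is probed. A condensed sketch (target is an assumed doc id):

    Weight w = searcher.createNormalizedWeight(query, true);
    for (LeafReaderContext ctx : searcher.getIndexReader().leaves()) {
      Scorer s = w.scorer(ctx, ctx.reader().getLiveDocs());
      if (s != null && s.advance(target) != DocIdSetIterator.NO_MORE_DOCS) {
        // s.docID() is now the first match at or after target in this leaf
      }
    }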
View File

@@ -360,7 +360,7 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
}
// Merge:
return TopDocs.merge(null, numHits, shardHits);
return TopDocs.merge(numHits, shardHits);
}
public TopDocs localSearch(Query query, int numHits) throws IOException {
@@ -369,6 +369,9 @@ public abstract class ShardSearchingTestBase extends LuceneTestCase {
@Override
public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException {
if (after == null) {
return super.searchAfter(after, query, numHits);
}
final TopDocs[] shardHits = new TopDocs[nodeVersions.length];
// results are merged in that order: score, shardIndex, doc. therefore we set
// after to after.score and depending on the nodeID we set doc to either:
@@ -412,7 +415,7 @@
}
// Merge:
return TopDocs.merge(null, numHits, shardHits);
return TopDocs.merge(numHits, shardHits);
}
public TopDocs localSearchAfter(ScoreDoc after, Query query, int numHits) throws IOException {
@@ -422,14 +425,14 @@
@Override
public TopFieldDocs search(Query query, int numHits, Sort sort) throws IOException {
assert sort != null;
final TopDocs[] shardHits = new TopDocs[nodeVersions.length];
final TopFieldDocs[] shardHits = new TopFieldDocs[nodeVersions.length];
for(int nodeID=0;nodeID<nodeVersions.length;nodeID++) {
if (nodeID == myNodeID) {
// My node; run using local shard searcher we
// already aquired:
shardHits[nodeID] = localSearch(query, numHits, sort);
} else {
shardHits[nodeID] = searchNode(nodeID, nodeVersions, query, sort, numHits, null);
shardHits[nodeID] = (TopFieldDocs) searchNode(nodeID, nodeVersions, query, sort, numHits, null);
}
}

View File

@@ -132,7 +132,7 @@ class ChildDocTransformer extends TransformerWithContext {
try {
Query parentQuery = idFt.getFieldQuery(null, idField, parentIdExt);
Query query = new ToChildBlockJoinQuery(parentQuery, parentsFilter, false);
Query query = new ToChildBlockJoinQuery(parentQuery, parentsFilter);
DocList children = context.searcher.getDocList(query, childFilterQuery, new Sort(), 0, limit);
if(children.matches() > 0) {
DocIterator i = children.iterator();

View File

@@ -319,6 +319,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
protected Map lonContext;
public SpatialWeight(IndexSearcher searcher) throws IOException {
super(SpatialDistanceQuery.this);
this.searcher = searcher;
this.latContext = ValueSource.newContext(searcher);
this.lonContext = ValueSource.newContext(searcher);
@@ -326,11 +327,6 @@
lonSource.createWeight(lonContext, searcher);
}
@Override
public Query getQuery() {
return SpatialDistanceQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
@@ -344,13 +340,13 @@
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new SpatialScorer(context, acceptDocs, this, queryWeight);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return ((SpatialScorer)scorer(context, context.reader().getLiveDocs(), true)).explain(doc);
return ((SpatialScorer)scorer(context, context.reader().getLiveDocs())).explain(doc);
}
}
@@ -567,7 +563,7 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
// if we were supposed to use bboxQuery, then we should have been rewritten using that query
assert bboxQuery == null;
return new SpatialWeight(searcher);

View File

@@ -80,7 +80,7 @@ public class ExportQParserPlugin extends QParserPlugin {
}
public Weight createWeight(IndexSearcher searcher) throws IOException {
return mainQuery.createWeight(searcher);
return mainQuery.createWeight(searcher, true);
}
public Query rewrite(IndexReader reader) throws IOException {

View File

@@ -210,7 +210,7 @@ class JoinQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return new JoinQueryWeight((SolrIndexSearcher)searcher);
}
@@ -224,6 +224,7 @@
ResponseBuilder rb;
public JoinQueryWeight(SolrIndexSearcher searcher) {
super(JoinQuery.this);
this.fromSearcher = searcher;
SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
if (info != null) {
@@ -280,11 +281,6 @@
this.toSearcher = searcher;
}
@Override
public Query getQuery() {
return JoinQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
@@ -303,7 +299,7 @@
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
if (filter == null) {
boolean debug = rb != null && rb.isDebug();
long start = debug ? System.currentTimeMillis() : 0;
@@ -572,7 +568,7 @@
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
Scorer scorer = scorer(context, context.reader().getLiveDocs(), true);
Scorer scorer = scorer(context, context.reader().getLiveDocs());
boolean exists = scorer.advance(doc) == doc;
ComplexExplanation result = new ComplexExplanation();

View File

@@ -171,8 +171,8 @@ public class ReRankQParserPlugin extends QParserPlugin {
}
public Weight createWeight(IndexSearcher searcher) throws IOException{
return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException{
return new ReRankWeight(mainQuery, reRankQuery, reRankWeight, searcher, needsScores);
}
}
@@ -182,23 +182,20 @@
private Weight mainWeight;
private double reRankWeight;
public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher) throws IOException {
public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores) throws IOException {
super(mainQuery);
this.reRankQuery = reRankQuery;
this.searcher = searcher;
this.reRankWeight = reRankWeight;
this.mainWeight = mainQuery.createWeight(searcher);
this.mainWeight = mainQuery.createWeight(searcher, needsScores);
}
public float getValueForNormalization() throws IOException {
return mainWeight.getValueForNormalization();
}
public Scorer scorer(LeafReaderContext context, Bits bits, boolean needsScores) throws IOException {
return mainWeight.scorer(context, bits, needsScores);
}
public Query getQuery() {
return mainWeight.getQuery();
public Scorer scorer(LeafReaderContext context, Bits bits) throws IOException {
return mainWeight.scorer(context, bits);
}
public void normalize(float norm, float topLevelBoost) {

View File

@@ -106,16 +106,12 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
private Map context;
public ConstantWeight(IndexSearcher searcher) throws IOException {
super(SolrConstantScoreQuery.this);
this.context = ValueSource.newContext(searcher);
if (filter instanceof SolrFilter)
((SolrFilter)filter).createWeight(context, searcher);
}
@Override
public Query getQuery() {
return SolrConstantScoreQuery.this;
}
@Override
public float getValueForNormalization() throws IOException {
queryWeight = getBoost();
@@ -129,7 +125,7 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return new ConstantScorer(context, this, queryWeight, acceptDocs);
}
@@ -233,7 +229,7 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
}
@Override
public Weight createWeight(IndexSearcher searcher) {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
try {
return new SolrConstantScoreQuery.ConstantWeight(searcher);
} catch (IOException e) {

View File

@@ -1113,7 +1113,7 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable,SolrIn
List<Weight> weights = new ArrayList<>(notCached.size());
for (Query q : notCached) {
Query qq = QueryUtils.makeQueryable(q);
weights.add(createNormalizedWeight(qq));
weights.add(createNormalizedWeight(qq, true));
}
pf.filter = new FilterImpl(answer, weights);
} else {
@@ -2474,7 +2474,7 @@ class FilterImpl extends Filter {
iterators.add(iter);
}
for (Weight w : weights) {
Scorer scorer = w.scorer(context, context.reader().getLiveDocs(), true);
Scorer scorer = w.scorer(context, context.reader().getLiveDocs());
if (scorer == null) return null;
iterators.add(scorer);
}
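
FilterImpl passes true unconditionally because these weights back a filter whose scorers may later be consulted for scores. When the caller does know its collector, the flag can be derived instead of hard-coded. A hypothetical helper, not part of this patch, relying on Collector.needsScores() from LUCENE-6218:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

class LeafScorerUtil {
  // Derives needsScores from the collector rather than assuming true.
  static Scorer leafScorer(IndexSearcher searcher, Query query, Collector collector,
                           LeafReaderContext leaf) throws IOException {
    Weight weight = searcher.createNormalizedWeight(query, collector.needsScores());
    return weight.scorer(leaf, leaf.reader().getLiveDocs());
  }
}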

View File

@@ -53,8 +53,8 @@ public class WrappedQuery extends ExtendedQueryBase {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
return q.createWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
return q.createWeight(searcher, needsScores);
}
@Override

View File

@@ -20,6 +20,7 @@ package org.apache.solr.search.grouping.distributed.responseprocessor;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.grouping.GroupDocs;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.util.BytesRef;
@@ -171,7 +172,12 @@ public class TopGroupsShardResponseProcessor implements ShardResponseProcessor {
}
int topN = rb.getGroupingSpec().getOffset() + rb.getGroupingSpec().getLimit();
TopDocs mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopDocs[topDocs.size()]));
final TopDocs mergedTopDocs;
if (sortWithinGroup == null) {
mergedTopDocs = TopDocs.merge(topN, topDocs.toArray(new TopDocs[topDocs.size()]));
} else {
mergedTopDocs = TopDocs.merge(sortWithinGroup, topN, topDocs.toArray(new TopFieldDocs[topDocs.size()]));
}
rb.mergedQueryCommandResults.put(query, new QueryCommandResult(mergedTopDocs, mergedMatches));
}
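
The null check exists because the sort-based TopDocs.merge overload now expects TopFieldDocs (hits that were actually collected with that Sort), while the relevance-based overload keeps taking plain TopDocs. A hypothetical helper mirroring that branch; array covariance lets a TopFieldDocs[] serve both paths.

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;

class ShardMergeUtil {
  static TopDocs mergeShardHits(Sort withinGroupSort, int topN, TopFieldDocs[] shardHits) {
    return withinGroupSort == null
        ? TopDocs.merge(topN, shardHits)                    // score-ordered merge
        : TopDocs.merge(withinGroupSort, topN, shardHits);  // sort-ordered merge
  }
}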

View File

@@ -29,7 +29,7 @@ public class BlockJoinChildQParser extends BlockJoinParentQParser {
}
protected Query createQuery(Query parentListQuery, Query query) {
return new ToChildBlockJoinQuery(query, getFilter(parentListQuery), false);
return new ToChildBlockJoinQuery(query, getFilter(parentListQuery));
}
@Override
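
The dropped constructor argument is the old doScores flag; whether parent scores are propagated to children is now decided by needsScores at createWeight time. Hypothetical usage, assuming parentQuery, a parentsFilter identifying parent documents, and a searcher are in scope:

// Scores flow from parents to children only when the weight is built
// with needsScores == true; the query itself no longer carries the flag.
ToChildBlockJoinQuery joinQuery = new ToChildBlockJoinQuery(parentQuery, parentsFilter);
Weight w = joinQuery.createWeight(searcher, true);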

View File

@@ -53,8 +53,8 @@ public class IgnoreAcceptDocsQuery extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
Weight inner = q.createWeight(searcher);
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
Weight inner = q.createWeight(searcher, needsScores);
return new IADWeight(inner);
}
@@ -62,6 +62,7 @@ public class IgnoreAcceptDocsQuery extends Query {
Weight w;
IADWeight(Weight delegate) {
super(q);
this.w = delegate;
}
@@ -70,11 +71,6 @@ public class IgnoreAcceptDocsQuery extends Query {
return w.explain(context, doc);
}
@Override
public Query getQuery() {
return q;
}
@Override
public float getValueForNormalization() throws IOException {
return w.getValueForNormalization();
@@ -86,8 +82,8 @@ public class IgnoreAcceptDocsQuery extends Query {
}
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
return w.scorer(context, null, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return w.scorer(context, null);
}
}

View File

@@ -64,17 +64,14 @@ final class DeleteByQueryWrapper extends Query {
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final LeafReader wrapped = wrap((LeafReader) searcher.getIndexReader());
final IndexSearcher privateContext = new IndexSearcher(wrapped);
final Weight inner = in.createWeight(privateContext);
return new Weight() {
final Weight inner = in.createWeight(privateContext, needsScores);
return new Weight(DeleteByQueryWrapper.this) {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException { throw new UnsupportedOperationException(); }
@Override
public Query getQuery() { return DeleteByQueryWrapper.this; }
@Override
public float getValueForNormalization() throws IOException { return inner.getValueForNormalization(); }
@@ -82,8 +79,8 @@ final class DeleteByQueryWrapper extends Query {
public void normalize(float norm, float topLevelBoost) { inner.normalize(norm, topLevelBoost); }
@Override
public Scorer scorer(LeafReaderContext context, Bits acceptDocs, boolean needsScores) throws IOException {
return inner.scorer(privateContext.getIndexReader().leaves().get(0), acceptDocs, needsScores);
public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
return inner.scorer(privateContext.getIndexReader().leaves().get(0), acceptDocs);
}
};
}

View File

@@ -114,8 +114,8 @@ public class TestRankQueryPlugin extends QParserPlugin {
return false;
}
public Weight createWeight(IndexSearcher indexSearcher ) throws IOException{
return q.createWeight(indexSearcher);
public Weight createWeight(IndexSearcher indexSearcher, boolean needsScores) throws IOException{
return q.createWeight(indexSearcher, needsScores);
}
public void setBoost(float boost) {