FunctionScoreQuery should implement two-phase iteration.

FunctionScoreQuery should do two things that it doesn't do today:
 - propagate the two-phase iterator from the wrapped scorer, so that execution
   remains efficient when e.g. a phrase or geo-distance query is wrapped
 - filter out docs that don't have a high enough score using two-phase
   iteration: this way the score is only checked when everything else matches

While making these changes, I noticed that minScore was ignored when scores were
not needed and that explain did not take minScore into account, so I fixed these
issues as well.
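
For readers new to two-phase iteration, here is a minimal sketch of the consumption pattern this commit enables (an illustrative helper, not code from this commit; it uses only Lucene APIs that also appear in the diffs below):

import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;

class TwoPhaseSketch {
    // Walk candidate docs via the cheap approximation, confirm each one with
    // matches(), and only compute the score once everything else has matched.
    static int countDocsAboveMinScore(Scorer scorer, float minScore) throws IOException {
        TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); // may be null
        DocIdSetIterator approximation = twoPhase == null ? scorer.iterator() : twoPhase.approximation();
        int count = 0;
        for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
            if (twoPhase != null && twoPhase.matches() == false) {
                continue; // e.g. phrase positions did not line up; score() is never called
            }
            if (scorer.score() >= minScore) { // the expensive check runs last
                count++;
            }
        }
        return count;
    }
}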
Adrien Grand 2015-12-22 10:04:17 +01:00
parent 56d2dd701e
commit 07658f58a8
7 changed files with 462 additions and 193 deletions

CustomBoostFactorScorer.java (deleted file)

@@ -1,152 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
abstract class CustomBoostFactorScorer extends Scorer {
final Scorer scorer;
final DocIdSetIterator iterator;
final float maxBoost;
final CombineFunction scoreCombiner;
Float minScore;
NextDoc nextDoc;
CustomBoostFactorScorer(Weight w, Scorer scorer, float maxBoost, CombineFunction scoreCombiner, Float minScore)
throws IOException {
super(w);
if (minScore == null) {
nextDoc = new AnyNextDoc();
} else {
nextDoc = new MinScoreNextDoc();
}
this.scorer = scorer;
this.iterator = scorer.iterator();
this.maxBoost = maxBoost;
this.scoreCombiner = scoreCombiner;
this.minScore = minScore;
}
@Override
public int docID() {
return scorer.docID();
}
@Override
public DocIdSetIterator iterator() {
return new DocIdSetIterator() {
@Override
public int nextDoc() throws IOException {
return nextDoc.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return nextDoc.advance(target);
}
@Override
public long cost() {
return iterator.cost();
}
@Override
public int docID() {
return iterator.docID();
}
};
}
public abstract float innerScore() throws IOException;
@Override
public float score() throws IOException {
return nextDoc.score();
}
@Override
public int freq() throws IOException {
return scorer.freq();
}
public interface NextDoc {
public int advance(int target) throws IOException;
public int nextDoc() throws IOException;
public float score() throws IOException;
}
public class MinScoreNextDoc implements NextDoc {
float currentScore = Float.MAX_VALUE * -1.0f;
@Override
public int nextDoc() throws IOException {
int doc;
do {
doc = iterator.nextDoc();
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
} while (currentScore < minScore);
return doc;
}
@Override
public float score() throws IOException {
return currentScore;
}
@Override
public int advance(int target) throws IOException {
int doc = iterator.advance(target);
if (doc == DocIdSetIterator.NO_MORE_DOCS) {
return doc;
}
currentScore = innerScore();
if (currentScore < minScore) {
return iterator.nextDoc();
}
return doc;
}
}
public class AnyNextDoc implements NextDoc {
@Override
public int nextDoc() throws IOException {
return iterator.nextDoc();
}
@Override
public float score() throws IOException {
return innerScore();
}
@Override
public int advance(int target) throws IOException {
return iterator.advance(target);
}
}
}

FiltersFunctionScoreQuery.java

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -142,7 +143,7 @@ public class FiltersFunctionScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-if (needsScores == false) {
+if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
}
@@ -184,11 +185,7 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryWeight.normalize(norm, boost);
}
-@Override
-public Scorer scorer(LeafReaderContext context) throws IOException {
-// we ignore scoreDocsInOrder parameter, because we need to score in
-// order if documents are scored with a script. The
-// ShardLookup depends on in order scoring.
+private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {
return null;
@@ -201,15 +198,24 @@ public class FiltersFunctionScoreQuery extends Query {
Scorer filterScorer = filterWeights[i].scorer(context);
docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
}
-return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore, needsScores);
+return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, needsScores);
}
+@Override
+public Scorer scorer(LeafReaderContext context) throws IOException {
+Scorer scorer = functionScorer(context);
+if (scorer != null && minScore != null) {
+scorer = new MinScoreScorer(this, scorer, minScore);
+}
+return scorer;
+}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-Explanation subQueryExpl = subQueryWeight.explain(context, doc);
-if (!subQueryExpl.isMatch()) {
-return subQueryExpl;
+Explanation expl = subQueryWeight.explain(context, doc);
+if (!expl.isMatch()) {
+return expl;
}
// First: Gather explanations for all filters
List<Explanation> filterExplanations = new ArrayList<>();
@@ -218,7 +224,7 @@ public class FiltersFunctionScoreQuery extends Query {
filterWeights[i].scorer(context));
if (docSet.get(doc)) {
FilterFunction filterFunction = filterFunctions[i];
-Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
+Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, expl);
double factor = functionExplanation.getValue();
float sc = CombineFunction.toFloat(factor);
Explanation filterExplanation = Explanation.match(sc, "function score, product of:",
@@ -226,46 +232,52 @@ public class FiltersFunctionScoreQuery extends Query {
filterExplanations.add(filterExplanation);
}
}
-if (filterExplanations.size() == 0) {
-return subQueryExpl;
+if (filterExplanations.size() > 0) {
+FiltersFunctionFactorScorer scorer = functionScorer(context);
+int actualDoc = scorer.iterator().advance(doc);
+assert (actualDoc == doc);
+double score = scorer.computeScore(doc, expl.getValue());
+Explanation factorExplanation = Explanation.match(
+CombineFunction.toFloat(score),
+"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
+filterExplanations);
+expl = combineFunction.explain(expl, factorExplanation, maxBoost);
}
-FiltersFunctionFactorScorer scorer = (FiltersFunctionFactorScorer)scorer(context);
-int actualDoc = scorer.iterator.advance(doc);
-assert (actualDoc == doc);
-double score = scorer.computeScore(doc, subQueryExpl.getValue());
-Explanation factorExplanation = Explanation.match(
-CombineFunction.toFloat(score),
-"function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]",
-filterExplanations);
-return combineFunction.explain(subQueryExpl, factorExplanation, maxBoost);
+if (minScore != null && minScore > expl.getValue()) {
+expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl);
+}
+return expl;
}
}
-static class FiltersFunctionFactorScorer extends CustomBoostFactorScorer {
+static class FiltersFunctionFactorScorer extends FilterScorer {
private final FilterFunction[] filterFunctions;
private final ScoreMode scoreMode;
private final LeafScoreFunction[] functions;
private final Bits[] docSets;
+private final CombineFunction scoreCombiner;
+private final float maxBoost;
private final boolean needsScores;
private FiltersFunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, FilterFunction[] filterFunctions,
-float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, Float minScore, boolean needsScores) throws IOException {
-super(w, scorer, maxBoost, scoreCombiner, minScore);
+float maxBoost, LeafScoreFunction[] functions, Bits[] docSets, CombineFunction scoreCombiner, boolean needsScores) throws IOException {
+super(scorer, w);
this.scoreMode = scoreMode;
this.filterFunctions = filterFunctions;
this.functions = functions;
this.docSets = docSets;
+this.scoreCombiner = scoreCombiner;
+this.maxBoost = maxBoost;
this.needsScores = needsScores;
}
@Override
-public float innerScore() throws IOException {
-int docId = scorer.docID();
+public float score() throws IOException {
+int docId = docID();
// Even if the weight is created with needsScores=false, it might
// be costly to call score(), so we explicitly check if scores
// are needed
-float subQueryScore = needsScores ? scorer.score() : 0f;
+float subQueryScore = needsScores ? super.score() : 0f;
double factor = computeScore(docId, subQueryScore);
return scoreCombiner.combine(subQueryScore, factor, maxBoost);
}

FunctionScoreQuery.java

@@ -23,6 +23,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
@@ -90,7 +91,7 @@ public class FunctionScoreQuery extends Query {
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-if (needsScores == false) {
+if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores);
}
@@ -128,8 +129,7 @@ public class FunctionScoreQuery extends Query {
subQueryWeight.normalize(norm, boost);
}
-@Override
-public Scorer scorer(LeafReaderContext context) throws IOException {
+private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) {
return null;
@@ -138,7 +138,16 @@ public class FunctionScoreQuery extends Query {
if (function != null) {
leafFunction = function.getLeafScoreFunction(context);
}
-return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, minScore, needsScores);
+return new FunctionFactorScorer(this, subQueryScorer, leafFunction, maxBoost, combineFunction, needsScores);
}
+@Override
+public Scorer scorer(LeafReaderContext context) throws IOException {
+Scorer scorer = functionScorer(context);
+if (scorer != null && minScore != null) {
+scorer = new MinScoreScorer(this, scorer, minScore);
+}
+return scorer;
+}
@Override
@@ -147,38 +156,47 @@ public class FunctionScoreQuery extends Query {
if (!subQueryExpl.isMatch()) {
return subQueryExpl;
}
+Explanation expl;
if (function != null) {
Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
-return combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
+expl = combineFunction.explain(subQueryExpl, functionExplanation, maxBoost);
} else {
-return subQueryExpl;
+expl = subQueryExpl;
}
+if (minScore != null && minScore > expl.getValue()) {
+expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl);
+}
+return expl;
}
}
-static class FunctionFactorScorer extends CustomBoostFactorScorer {
+static class FunctionFactorScorer extends FilterScorer {
private final LeafScoreFunction function;
private final boolean needsScores;
+private final CombineFunction scoreCombiner;
+private final float maxBoost;
-private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, Float minScore, boolean needsScores)
+private FunctionFactorScorer(CustomBoostFactorWeight w, Scorer scorer, LeafScoreFunction function, float maxBoost, CombineFunction scoreCombiner, boolean needsScores)
throws IOException {
-super(w, scorer, maxBoost, scoreCombiner, minScore);
+super(scorer, w);
this.function = function;
+this.scoreCombiner = scoreCombiner;
+this.maxBoost = maxBoost;
this.needsScores = needsScores;
}
@Override
-public float innerScore() throws IOException {
+public float score() throws IOException {
// Even if the weight is created with needsScores=false, it might
// be costly to call score(), so we explicitly check if scores
// are needed
-float score = needsScores ? scorer.score() : 0f;
+float score = needsScores ? super.score() : 0f;
if (function == null) {
return score;
} else {
return scoreCombiner.combine(score,
-function.score(scorer.docID(), score), maxBoost);
+function.score(docID(), score), maxBoost);
}
}
}
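
A note on the two createWeight changes above (in FiltersFunctionScoreQuery and FunctionScoreQuery): the old code delegated to the sub-query's Weight whenever scores were not needed, which silently dropped minScore; the added minScore == null condition keeps that shortcut only when it is safe, because enforcing minScore requires computing scores even if the caller does not ask for them. This is the "minScore was ignored when scores were not needed" fix mentioned in the commit message.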

MinScoreScorer.java (new file)

@@ -0,0 +1,95 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.ScoreCachingWrappingScorer;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
/** A {@link Scorer} that filters out documents that have a score that is
* lower than a configured constant. */
final class MinScoreScorer extends Scorer {
private final Scorer in;
private final float minScore;
MinScoreScorer(Weight weight, Scorer scorer, float minScore) {
super(weight);
if (scorer instanceof ScoreCachingWrappingScorer == false) {
// when minScore is set, scores might be requested twice: once
// to verify the match, and once by the collector
scorer = new ScoreCachingWrappingScorer(scorer);
}
this.in = scorer;
this.minScore = minScore;
}
public Scorer getScorer() {
return in;
}
@Override
public int docID() {
return in.docID();
}
@Override
public float score() throws IOException {
return in.score();
}
@Override
public int freq() throws IOException {
return in.freq();
}
@Override
public DocIdSetIterator iterator() {
return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
}
@Override
public TwoPhaseIterator twoPhaseIterator() {
final TwoPhaseIterator inTwoPhase = this.in.twoPhaseIterator();
final DocIdSetIterator approximation = inTwoPhase == null ? in.iterator() : inTwoPhase.approximation();
return new TwoPhaseIterator(approximation) {
@Override
public boolean matches() throws IOException {
// we need to check the two-phase iterator first
// otherwise calling score() is illegal
if (inTwoPhase != null && inTwoPhase.matches() == false) {
return false;
}
return in.score() >= minScore;
}
@Override
public float matchCost() {
return 1000f // random constant for the score computation
+ (inTwoPhase == null ? 0 : inTwoPhase.matchCost());
}
};
}
}
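
Two details in MinScoreScorer are easy to miss: the constructor wraps non-caching scorers in ScoreCachingWrappingScorer because, once a minScore is set, score() may run twice per document (once inside matches() to verify the match, and once more when the collector reads the score); and matchCost() returns the wrapped iterator's match cost plus a flat 1000, an arbitrary constant standing in for the cost of computing the score.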

MinScoreScorerTests.java (new file)

@@ -0,0 +1,173 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.search.function;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class MinScoreScorerTests extends LuceneTestCase {
private static DocIdSetIterator iterator(final int... docs) {
return new DocIdSetIterator() {
int i = -1;
@Override
public int nextDoc() throws IOException {
if (i + 1 == docs.length) {
return NO_MORE_DOCS;
} else {
return docs[++i];
}
}
@Override
public int docID() {
return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i];
}
@Override
public long cost() {
return docs.length;
}
@Override
public int advance(int target) throws IOException {
return slowAdvance(target);
}
};
}
private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) {
final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs);
return new Scorer(null) {
public DocIdSetIterator iterator() {
if (twoPhase) {
return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator());
} else {
return iterator;
}
}
public TwoPhaseIterator twoPhaseIterator() {
if (twoPhase) {
return new TwoPhaseIterator(iterator) {
@Override
public boolean matches() throws IOException {
return Arrays.binarySearch(docs, iterator.docID()) >= 0;
}
@Override
public float matchCost() {
return 10;
}
};
} else {
return null;
}
}
@Override
public int docID() {
return iterator.docID();
}
@Override
public float score() throws IOException {
final int idx = Arrays.binarySearch(docs, docID());
return scores[idx];
}
@Override
public int freq() throws IOException {
return 1;
}
};
}
public void doTestRandom(boolean twoPhase) throws IOException {
final int maxDoc = TestUtil.nextInt(random(), 10, 10000);
final int numDocs = TestUtil.nextInt(random(), 1, maxDoc / 2);
final Set<Integer> uniqueDocs = new HashSet<>();
while (uniqueDocs.size() < numDocs) {
uniqueDocs.add(random().nextInt(maxDoc));
}
final int[] docs = new int[numDocs];
int i = 0;
for (int doc : uniqueDocs) {
docs[i++] = doc;
}
Arrays.sort(docs);
final float[] scores = new float[numDocs];
for (i = 0; i < numDocs; ++i) {
scores[i] = random().nextFloat();
}
Scorer scorer = scorer(maxDoc, docs, scores, twoPhase);
final float minScore = random().nextFloat();
Scorer minScoreScorer = new MinScoreScorer(null, scorer, minScore);
int doc = -1;
while (doc != DocIdSetIterator.NO_MORE_DOCS) {
final int target;
if (random().nextBoolean()) {
target = doc + 1;
doc = minScoreScorer.iterator().nextDoc();
} else {
target = doc + TestUtil.nextInt(random(), 1, 10);
doc = minScoreScorer.iterator().advance(target);
}
int idx = Arrays.binarySearch(docs, target);
if (idx < 0) {
idx = -1 - idx;
}
while (idx < docs.length && scores[idx] < minScore) {
idx += 1;
}
if (idx == docs.length) {
assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc);
} else {
assertEquals(docs[idx], doc);
assertEquals(scores[idx], scorer.score(), 0f);
}
}
}
public void testRegularIterator() throws IOException {
final int iters = atLeast(5);
for (int iter = 0; iter < iters; ++iter) {
doTestRandom(false);
}
}
public void testTwoPhaseIterator() throws IOException {
final int iters = atLeast(5);
for (int iter = 0; iter < iters; ++iter) {
doTestRandom(true);
}
}
}

FunctionScoreEquivalenceTests.java (new file)

@@ -0,0 +1,71 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RandomApproximationQuery;
import org.apache.lucene.search.SearchEquivalenceTestBase;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.FilterFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
public class FunctionScoreEquivalenceTests extends SearchEquivalenceTestBase {
public void testMinScoreAllIncluded() throws Exception {
Term term = randomTerm();
Query query = new TermQuery(term);
FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, 0f, null, Float.POSITIVE_INFINITY);
assertSameScores(query, fsq);
FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY);
assertSameScores(query, ffsq);
}
public void testMinScoreAllExcluded() throws Exception {
Term term = randomTerm();
Query query = new TermQuery(term);
FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, Float.POSITIVE_INFINITY, null, Float.POSITIVE_INFINITY);
assertSameScores(new MatchNoDocsQuery(), fsq);
FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, Float.POSITIVE_INFINITY, CombineFunction.MULTIPLY);
assertSameScores(new MatchNoDocsQuery(), ffsq);
}
public void testTwoPhaseMinScore() throws Exception {
Term term = randomTerm();
Query query = new TermQuery(term);
Float minScore = random().nextFloat();
FunctionScoreQuery fsq1 = new FunctionScoreQuery(query, null, minScore, null, Float.POSITIVE_INFINITY);
FunctionScoreQuery fsq2 = new FunctionScoreQuery(new RandomApproximationQuery(query, random()), null, minScore, null, Float.POSITIVE_INFINITY);
assertSameScores(fsq1, fsq2);
FiltersFunctionScoreQuery ffsq1 = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, minScore, CombineFunction.MULTIPLY);
FiltersFunctionScoreQuery ffsq2 = new FiltersFunctionScoreQuery(new RandomApproximationQuery(query, random()), ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, minScore, CombineFunction.MULTIPLY);
assertSameScores(ffsq1, ffsq2);
}
}

FunctionScoreTests.java

@@ -31,6 +31,9 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.RandomApproximationQuery;
+import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
@@ -46,6 +49,8 @@ import org.elasticsearch.common.lucene.search.function.LeafScoreFunction;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
+import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.FilterFunction;
+import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery.ScoreMode;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
@@ -559,4 +564,51 @@ public class FunctionScoreTests extends ESTestCase {
float score = topDocsWithWeights.scoreDocs[0].score;
assertThat(score, equalTo(2.0f));
}
+public void testMinScoreExplain() throws IOException {
+Query query = new MatchAllDocsQuery();
+Explanation queryExpl = searcher.explain(query, 0);
+FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, 0f, null, Float.POSITIVE_INFINITY);
+Explanation fsqExpl = searcher.explain(fsq, 0);
+assertTrue(fsqExpl.isMatch());
+assertEquals(queryExpl.getValue(), fsqExpl.getValue(), 0f);
+assertEquals(queryExpl.getDescription(), fsqExpl.getDescription());
+fsq = new FunctionScoreQuery(query, null, 10f, null, Float.POSITIVE_INFINITY);
+fsqExpl = searcher.explain(fsq, 0);
+assertFalse(fsqExpl.isMatch());
+assertEquals("Score value is too low, expected at least 10.0 but got 1.0", fsqExpl.getDescription());
+FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 0f, CombineFunction.MULTIPLY);
+Explanation ffsqExpl = searcher.explain(ffsq, 0);
+assertTrue(ffsqExpl.isMatch());
+assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f);
+assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription());
+ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, 10f, CombineFunction.MULTIPLY);
+ffsqExpl = searcher.explain(ffsq, 0);
+assertFalse(ffsqExpl.isMatch());
+assertEquals("Score value is too low, expected at least 10.0 but got 1.0", ffsqExpl.getDescription());
+}
+public void testPropagatesApproximations() throws IOException {
+Query query = new RandomApproximationQuery(new MatchAllDocsQuery(), random());
+IndexSearcher searcher = newSearcher(reader);
+searcher.setQueryCache(null); // otherwise we could get a cached entry that does not have approximations
+FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, null, null, Float.POSITIVE_INFINITY);
+for (boolean needsScores : new boolean[] {true, false}) {
+Weight weight = searcher.createWeight(fsq, needsScores);
+Scorer scorer = weight.scorer(reader.leaves().get(0));
+assertNotNull(scorer.twoPhaseIterator());
+}
+FiltersFunctionScoreQuery ffsq = new FiltersFunctionScoreQuery(query, ScoreMode.SUM, new FilterFunction[0], Float.POSITIVE_INFINITY, null, CombineFunction.MULTIPLY);
+for (boolean needsScores : new boolean[] {true, false}) {
+Weight weight = searcher.createWeight(ffsq, needsScores);
+Scorer scorer = weight.scorer(reader.leaves().get(0));
+assertNotNull(scorer.twoPhaseIterator());
+}
+}
}