LUCENE-6446: Simplified Explanation API.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1675109 13f79535-47bb-0310-9956-ffa450edef68
Adrien Grand 2015-04-21 12:55:41 +00:00
parent fb5bdbf543
commit c6821e1a19
71 changed files with 496 additions and 673 deletions
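For orientation, a minimal sketch (not part of the patch; values and descriptions invented) contrasting the removed mutable ComplexExplanation style with the factory-based Explanation API this commit introduces:

// Before this commit: a mutable explanation assembled through setters.
ComplexExplanation old = new ComplexExplanation();
old.setDescription("sum of:");
old.setValue(1.5f);
old.setMatch(Boolean.TRUE);
old.addDetail(new Explanation(1.5f, "termFreq=3"));

// After this commit: immutable explanations built through static factories.
Explanation detail = Explanation.match(1.5f, "termFreq=3");
Explanation sum = Explanation.match(1.5f, "sum of:", detail);
Explanation miss = Explanation.noMatch("no matching term");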

View File

@ -123,6 +123,8 @@ API Changes
* LUCENE-6425: Replaced Query.extractTerms with Weight.extractTerms.
(Adrien Grand)
* LUCENE-6446: Simplified Explanation API. (Adrien Grand)
Other
* LUCENE-6413: Test runner should report the number of suites completed/

View File

@ -135,8 +135,7 @@ public class BooleanWeight extends Weight {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
final int minShouldMatch = query.getMinimumNumberShouldMatch();
ComplexExplanation sumExpl = new ComplexExplanation();
sumExpl.setDescription("sum of:");
List<Explanation> subs = new ArrayList<>();
int coord = 0;
float sum = 0.0f;
boolean fail = false;
@ -146,30 +145,17 @@ public class BooleanWeight extends Weight {
for (Iterator<Weight> wIter = weights.iterator(); wIter.hasNext();) {
Weight w = wIter.next();
BooleanClause c = cIter.next();
if (w.scorer(context, context.reader().getLiveDocs()) == null) {
if (c.isRequired()) {
fail = true;
Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
sumExpl.addDetail(r);
}
continue;
}
Explanation e = w.explain(context, doc);
if (e.isMatch()) {
if (c.isScoring()) {
sumExpl.addDetail(e);
subs.add(e);
sum += e.getValue();
coord++;
} else if (c.isRequired()) {
Explanation r = new Explanation(0f, "match on required clause, product of:");
r.addDetail(new Explanation(0f, Occur.FILTER + " clause"));
r.addDetail(e);
sumExpl.addDetail(r);
subs.add(Explanation.match(0f, "match on required clause, product of:",
Explanation.match(0f, Occur.FILTER + " clause"), e));
} else if (c.isProhibited()) {
Explanation r =
new Explanation(0.0f, "match on prohibited clause (" + c.getQuery().toString() + ")");
r.addDetail(e);
sumExpl.addDetail(r);
subs.add(Explanation.noMatch("match on prohibited clause (" + c.getQuery().toString() + ")", e));
fail = true;
}
if (!c.isProhibited()) {
@ -179,39 +165,24 @@ public class BooleanWeight extends Weight {
shouldMatchCount++;
}
} else if (c.isRequired()) {
Explanation r = new Explanation(0.0f, "no match on required clause (" + c.getQuery().toString() + ")");
r.addDetail(e);
sumExpl.addDetail(r);
subs.add(Explanation.noMatch("no match on required clause (" + c.getQuery().toString() + ")", e));
fail = true;
}
}
if (fail) {
sumExpl.setMatch(Boolean.FALSE);
sumExpl.setValue(0.0f);
sumExpl.setDescription
("Failure to meet condition(s) of required/prohibited clause(s)");
return sumExpl;
return Explanation.noMatch("Failure to meet condition(s) of required/prohibited clause(s)", subs);
} else if (matchCount == 0) {
return Explanation.noMatch("No matching clauses", subs);
} else if (shouldMatchCount < minShouldMatch) {
sumExpl.setMatch(Boolean.FALSE);
sumExpl.setValue(0.0f);
sumExpl.setDescription("Failure to match minimum number "+
"of optional clauses: " + minShouldMatch);
return sumExpl;
}
sumExpl.setMatch(0 < matchCount);
sumExpl.setValue(sum);
final float coordFactor = disableCoord ? 1.0f : coord(coord, maxCoord);
if (coordFactor == 1.0f) {
return sumExpl; // eliminate wrapper
return Explanation.noMatch("Failure to match minimum number of optional clauses: " + minShouldMatch, subs);
} else {
ComplexExplanation result = new ComplexExplanation(sumExpl.isMatch(),
sum*coordFactor,
"product of:");
result.addDetail(sumExpl);
result.addDetail(new Explanation(coordFactor,
"coord("+coord+"/"+maxCoord+")"));
// we have a match
Explanation result = Explanation.match(sum, "sum of:", subs);
final float coordFactor = disableCoord ? 1.0f : coord(coord, maxCoord);
if (coordFactor != 1f) {
result = Explanation.match(sum * coordFactor, "product of:",
result, Explanation.match(coordFactor, "coord("+coord+"/"+maxCoord+")"));
}
return result;
}
}

View File

@ -1,71 +0,0 @@
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** Expert: Describes the score computation for document and query, and
* can distinguish a match independent of a positive value. */
public class ComplexExplanation extends Explanation {
private Boolean match;
public ComplexExplanation() {
super();
}
public ComplexExplanation(boolean match, float value, String description) {
// NOTE: use of "boolean" instead of "Boolean" in params is conscious
// choice to encourage clients to be specific.
super(value, description);
this.match = Boolean.valueOf(match);
}
/**
* The match status of this explanation node.
* @return May be null if match status is unknown
*/
public Boolean getMatch() { return match; }
/**
* Sets the match status assigned to this explanation node.
* @param match May be null if match status is unknown
*/
public void setMatch(Boolean match) { this.match = match; }
/**
* Indicates whether or not this Explanation models a good match.
*
* <p>
* If the match status is explicitly set (i.e.: not null) this method
* uses it; otherwise it defers to the superclass.
* </p>
* @see #getMatch
*/
@Override
public boolean isMatch() {
Boolean m = getMatch();
return (null != m ? m.booleanValue() : super.isMatch());
}
@Override
protected String getSummary() {
if (null == getMatch())
return super.getSummary();
return getValue() + " = "
+ (isMatch() ? "(MATCH) " : "(NON-MATCH) ")
+ getDescription();
}
}

View File

@ -117,19 +117,13 @@ public class ConstantScoreQuery extends Query {
final Scorer cs = scorer(context, context.reader().getLiveDocs());
final boolean exists = (cs != null && cs.advance(doc) == doc);
final ComplexExplanation result = new ComplexExplanation();
if (exists) {
result.setDescription(ConstantScoreQuery.this.toString() + ", product of:");
result.setValue(queryWeight);
result.setMatch(Boolean.TRUE);
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(queryNorm, "queryNorm"));
return Explanation.match(
queryWeight, ConstantScoreQuery.this.toString() + ", product of:",
Explanation.match(getBoost(), "boost"), Explanation.match(queryNorm, "queryNorm"));
} else {
result.setDescription(ConstantScoreQuery.this.toString() + " doesn't match id " + doc);
result.setValue(0);
result.setMatch(Boolean.FALSE);
return Explanation.noMatch(ConstantScoreQuery.this.toString() + " doesn't match id " + doc);
}
return result;
}
}

View File

@ -62,19 +62,13 @@ public abstract class ConstantScoreWeight extends Weight {
final Scorer s = scorer(context, context.reader().getLiveDocs());
final boolean exists = (s != null && s.advance(doc) == doc);
final ComplexExplanation result = new ComplexExplanation();
if (exists) {
result.setDescription(getQuery().toString() + ", product of:");
result.setValue(queryWeight);
result.setMatch(Boolean.TRUE);
result.addDetail(new Explanation(getQuery().getBoost(), "boost"));
result.addDetail(new Explanation(queryNorm, "queryNorm"));
return Explanation.match(
queryWeight, getQuery().toString() + ", product of:",
Explanation.match(getQuery().getBoost(), "boost"), Explanation.match(queryNorm, "queryNorm"));
} else {
result.setDescription(getQuery().toString() + " doesn't match id " + doc);
result.setValue(0);
result.setMatch(Boolean.FALSE);
return Explanation.noMatch(getQuery().toString() + " doesn't match id " + doc);
}
return result;
}
@Override

View File

@ -184,21 +184,25 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/** Explain the score we computed for doc */
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
if (disjuncts.size() == 1) return weights.get(0).explain(context,doc);
ComplexExplanation result = new ComplexExplanation();
boolean match = false;
float max = 0.0f, sum = 0.0f;
result.setDescription(tieBreakerMultiplier == 0.0f ? "max of:" : "max plus " + tieBreakerMultiplier + " times others of:");
List<Explanation> subs = new ArrayList<>();
for (Weight wt : weights) {
Explanation e = wt.explain(context, doc);
if (e.isMatch()) {
result.setMatch(Boolean.TRUE);
result.addDetail(e);
match = true;
subs.add(e);
sum += e.getValue();
max = Math.max(max, e.getValue());
}
}
result.setValue(max + (sum - max) * tieBreakerMultiplier);
return result;
if (match) {
final float score = max + (sum - max) * tieBreakerMultiplier;
final String desc = tieBreakerMultiplier == 0.0f ? "max of:" : "max plus " + tieBreakerMultiplier + " times others of:";
return Explanation.match(score, desc, subs);
} else {
return Explanation.noMatch("No matching clause");
}
}
} // end of DisjunctionMaxWeight inner class

View File

@ -18,45 +18,74 @@ package org.apache.lucene.search;
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/** Expert: Describes the score computation for document and query. */
public class Explanation {
private float value; // the value of this node
private String description; // what it represents
private ArrayList<Explanation> details; // sub-explanations
public Explanation() {}
public Explanation(float value, String description) {
this.value = value;
this.description = description;
/**
* Create a new explanation for a match.
* @param value the contribution to the score of the document
* @param description how {@code value} was computed
* @param details sub explanations that contributed to this explanation
*/
public static Explanation match(float value, String description, Collection<Explanation> details) {
return new Explanation(true, value, description, details);
}
/**
* Indicates whether or not this Explanation models a good match.
*
* <p>
* By default, an Explanation represents a "match" if the value is positive.
* </p>
* @see #getValue
* Create a new explanation for a match.
* @param value the contribution to the score of the document
* @param description how {@code value} was computed
* @param details sub explanations that contributed to this explanation
*/
public boolean isMatch() {
return (0.0f < getValue());
public static Explanation match(float value, String description, Explanation... details) {
return new Explanation(true, value, description, Arrays.asList(details));
}
/**
* Create a new explanation for a document which does not match.
*/
public static Explanation noMatch(String description, Collection<Explanation> details) {
return new Explanation(false, 0f, description, details);
}
/**
* Create a new explanation for a document which does not match.
*/
public static Explanation noMatch(String description, Explanation... details) {
return new Explanation(false, 0f, description, Arrays.asList(details));
}
private final boolean match; // whether the document matched
private final float value; // the value of this node
private final String description; // what it represents
private final List<Explanation> details; // sub-explanations
/** Create a new explanation */
private Explanation(boolean match, float value, String description, Collection<Explanation> details) {
this.match = match;
this.value = value;
this.description = Objects.requireNonNull(description);
this.details = Collections.unmodifiableList(new ArrayList<>(details));
}
/**
* Indicates whether or not this Explanation models a match.
*/
public boolean isMatch() {
return match;
}
/** The value assigned to this explanation node. */
public float getValue() { return value; }
/** Sets the value assigned to this explanation node. */
public void setValue(float value) { this.value = value; }
/** A description of this explanation node. */
public String getDescription() { return description; }
/** Sets the description of this explanation node. */
public void setDescription(String description) {
this.description = description;
}
/**
* A short one line summary which should contain all high level
@ -68,18 +97,9 @@ public class Explanation {
/** The sub-nodes of this explanation node. */
public Explanation[] getDetails() {
if (details == null)
return null;
return details.toArray(new Explanation[0]);
}
/** Adds a sub-node to this explanation node. */
public void addDetail(Explanation detail) {
if (details == null)
details = new ArrayList<>();
details.add(detail);
}
/** Render an explanation as text. */
@Override
public String toString() {
@ -94,10 +114,8 @@ public class Explanation {
buffer.append("\n");
Explanation[] details = getDetails();
if (details != null) {
for (int i = 0 ; i < details.length; i++) {
buffer.append(details[i].toString(depth+1));
}
for (int i = 0 ; i < details.length; i++) {
buffer.append(details[i].toString(depth+1));
}
return buffer.toString();
@ -114,10 +132,8 @@ public class Explanation {
buffer.append("<br />\n");
Explanation[] details = getDetails();
if (details != null) {
for (int i = 0 ; i < details.length; i++) {
buffer.append(details[i].toHtml());
}
for (int i = 0 ; i < details.length; i++) {
buffer.append(details[i].toHtml());
}
buffer.append("</li>\n");

View File

@ -95,14 +95,12 @@ public abstract class Filter extends Query {
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
final Scorer scorer = scorer(context, context.reader().getLiveDocs());
final boolean match = (scorer != null && scorer.advance(doc) == doc);
final String desc;
if (match) {
assert scorer.score() == 0f;
desc = "Match on id " + doc;
return Explanation.match(0f, "Match on id " + doc);
} else {
desc = "No match on id " + doc;
return Explanation.match(0f, "No match on id " + doc);
}
return new ComplexExplanation(match, 0f, desc);
}
@Override

View File

@ -108,10 +108,7 @@ public class FilteredQuery extends Query {
if (docIdSetIterator.advance(i) == i) {
return inner;
} else {
Explanation result = new Explanation
(0.0f, "failure to match filter: " + f.toString());
result.addDetail(inner);
return result;
return Explanation.noMatch("failure to match filter: " + f.toString(), inner);
}
}

View File

@ -18,6 +18,8 @@ package org.apache.lucene.search;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.lucene.index.IndexReader;
@ -119,15 +121,12 @@ public final class MatchAllDocsQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) {
// explain query weight
Explanation queryExpl = new ComplexExplanation
(true, queryWeight, "MatchAllDocsQuery, product of:");
List<Explanation> subs = new ArrayList<>();
if (getBoost() != 1.0f) {
queryExpl.addDetail(new Explanation(getBoost(),"boost"));
subs.add(Explanation.match(getBoost(),"boost"));
}
queryExpl.addDetail(new Explanation(queryNorm,"queryNorm"));
return queryExpl;
subs.add(Explanation.match(queryNorm, "queryNorm"));
return Explanation.match(queryWeight, "MatchAllDocsQuery, product of:", subs);
}
}

View File

@ -244,17 +244,16 @@ public class MultiPhraseQuery extends Query {
if (newDoc == doc) {
float freq = slop == 0 ? scorer.freq() : ((SloppyPhraseScorer)scorer).sloppyFreq();
SimScorer docScorer = similarity.simScorer(stats, context);
ComplexExplanation result = new ComplexExplanation();
result.setDescription("weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc, new Explanation(freq, "phraseFreq=" + freq));
result.addDetail(scoreExplanation);
result.setValue(scoreExplanation.getValue());
result.setMatch(true);
return result;
Explanation freqExplanation = Explanation.match(freq, "phraseFreq=" + freq);
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
return Explanation.match(
scoreExplanation.getValue(),
"weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -341,17 +341,16 @@ public class PhraseQuery extends Query {
if (newDoc == doc) {
float freq = slop == 0 ? scorer.freq() : ((SloppyPhraseScorer)scorer).sloppyFreq();
SimScorer docScorer = similarity.simScorer(stats, context);
ComplexExplanation result = new ComplexExplanation();
result.setDescription("weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc, new Explanation(freq, "phraseFreq=" + freq));
result.addDetail(scoreExplanation);
result.setValue(scoreExplanation.getValue());
result.setMatch(true);
return result;
Explanation freqExplanation = Explanation.match(freq, "phraseFreq=" + freq);
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
return Explanation.match(
scoreExplanation.getValue(),
"weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -17,14 +17,13 @@ package org.apache.lucene.search;
* limitations under the License.
*/
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.LeafReaderContext;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
/** A {@link Rescorer} that uses a provided Query to assign
* scores to the first-pass hits.
*
@ -151,22 +150,16 @@ public abstract class QueryRescorer extends Rescorer {
score = combine(firstPassExplanation.getValue(), true, secondPassScore.floatValue());
}
Explanation result = new Explanation(score, "combined first and second pass score using " + getClass());
Explanation first = new Explanation(firstPassExplanation.getValue(), "first pass score");
first.addDetail(firstPassExplanation);
result.addDetail(first);
Explanation first = Explanation.match(firstPassExplanation.getValue(), "first pass score", firstPassExplanation);
Explanation second;
if (secondPassScore == null) {
second = new Explanation(0.0f, "no second pass score");
second = Explanation.noMatch("no second pass score");
} else {
second = new Explanation(secondPassScore, "second pass score");
second = Explanation.match(secondPassScore, "second pass score", secondPassExplanation);
}
second.addDetail(secondPassExplanation);
result.addDetail(second);
return result;
return Explanation.match(score, "combined first and second pass score using " + getClass(), first, second);
}
/** Sugar API, calling {#rescore} using a simple linear

View File

@ -18,6 +18,7 @@ package org.apache.lucene.search;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
@ -98,23 +99,22 @@ public class SortRescorer extends Rescorer {
TopDocs hits = rescore(searcher, oneHit, 1);
assert hits.totalHits == 1;
// TODO: if we could ask the Sort to explain itself then
// we wouldn't need the separate ExpressionRescorer...
Explanation result = new Explanation(0.0f, "sort field values for sort=" + sort.toString());
List<Explanation> subs = new ArrayList<>();
// Add first pass:
Explanation first = new Explanation(firstPassExplanation.getValue(), "first pass score");
first.addDetail(firstPassExplanation);
result.addDetail(first);
Explanation first = Explanation.match(firstPassExplanation.getValue(), "first pass score", firstPassExplanation);
subs.add(first);
FieldDoc fieldDoc = (FieldDoc) hits.scoreDocs[0];
// Add sort values:
SortField[] sortFields = sort.getSort();
for(int i=0;i<sortFields.length;i++) {
result.addDetail(new Explanation(0.0f, "sort field " + sortFields[i].toString() + " value=" + fieldDoc.fields[i]));
subs.add(Explanation.match(0.0f, "sort field " + sortFields[i].toString() + " value=" + fieldDoc.fields[i]));
}
return result;
// TODO: if we could ask the Sort to explain itself then
// we wouldn't need the separate ExpressionRescorer...
return Explanation.match(0.0f, "sort field values for sort=" + sort.toString(), subs);
}
}

View File

@ -126,18 +126,16 @@ public class TermQuery extends Query {
if (newDoc == doc) {
float freq = scorer.freq();
SimScorer docScorer = similarity.simScorer(stats, context);
ComplexExplanation result = new ComplexExplanation();
result.setDescription("weight(" + getQuery() + " in " + doc + ") ["
+ similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc,
new Explanation(freq, "termFreq=" + freq));
result.addDetail(scoreExplanation);
result.setValue(scoreExplanation.getValue());
result.setMatch(true);
return result;
Explanation freqExplanation = Explanation.match(freq, "termFreq=" + freq);
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
return Explanation.match(
scoreExplanation.getValue(),
"weight(" + getQuery() + " in " + doc + ") ["
+ similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -56,10 +56,9 @@ public abstract class PayloadFunction {
public abstract float docScore(int docId, String field, int numPayloadsSeen, float payloadScore);
public Explanation explain(int docId, String field, int numPayloadsSeen, float payloadScore){
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ".docScore()");
result.setValue(docScore(docId, field, numPayloadsSeen, payloadScore));
return result;
return Explanation.match(
docScore(docId, field, numPayloadsSeen, payloadScore),
getClass().getSimpleName() + ".docScore()");
};
@Override

View File

@ -22,7 +22,6 @@ import java.util.Collection;
import java.util.Iterator;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
@ -161,26 +160,25 @@ public class PayloadNearQuery extends SpanNearQuery {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {
float freq = scorer.freq();
Explanation freqExplanation = Explanation.match(freq, "phraseFreq=" + freq);
SimScorer docScorer = similarity.simScorer(stats, context);
Explanation expl = new Explanation();
expl.setDescription("weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc, new Explanation(freq, "phraseFreq=" + freq));
expl.addDetail(scoreExplanation);
expl.setValue(scoreExplanation.getValue());
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
Explanation expl = Explanation.match(
scoreExplanation.getValue(),
"weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
String field = ((SpanQuery)getQuery()).getField();
// now the payloads part
Explanation payloadExpl = function.explain(doc, field, scorer.payloadsSeen, scorer.payloadScore);
// combined
ComplexExplanation result = new ComplexExplanation();
result.addDetail(expl);
result.addDetail(payloadExpl);
result.setValue(expl.getValue() * payloadExpl.getValue());
result.setDescription("PayloadNearQuery, product of:");
return result;
return Explanation.match(
expl.getValue() * payloadExpl.getValue(),
"PayloadNearQuery, product of:",
expl, payloadExpl);
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -23,18 +23,16 @@ import java.util.Objects;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.search.spans.Spans;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanScorer;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.search.spans.SpanWeight;
import org.apache.lucene.search.spans.Spans;
import org.apache.lucene.search.spans.TermSpans;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@ -179,12 +177,13 @@ public class PayloadTermQuery extends SpanTermQuery {
int newDoc = scorer.advance(doc);
if (newDoc == doc) {
float freq = scorer.sloppyFreq();
Explanation freqExplanation = Explanation.match(freq, "phraseFreq=" + freq);
SimScorer docScorer = similarity.simScorer(stats, context);
Explanation expl = new Explanation();
expl.setDescription("weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc, new Explanation(freq, "phraseFreq=" + freq));
expl.addDetail(scoreExplanation);
expl.setValue(scoreExplanation.getValue());
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
Explanation expl = Explanation.match(
scoreExplanation.getValue(),
"weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
// now the payloads part
// QUESTION: Is there a way to avoid this skipTo call? We need to know
// whether to load the payload or not
@ -192,25 +191,18 @@ public class PayloadTermQuery extends SpanTermQuery {
// would be a good idea
String field = ((SpanQuery)getQuery()).getField();
Explanation payloadExpl = function.explain(doc, field, scorer.payloadsSeen, scorer.payloadScore);
payloadExpl.setValue(scorer.getPayloadScore());
// combined
ComplexExplanation result = new ComplexExplanation();
if (includeSpanScore) {
result.addDetail(expl);
result.addDetail(payloadExpl);
result.setValue(expl.getValue() * payloadExpl.getValue());
result.setDescription("btq, product of:");
return Explanation.match(
expl.getValue() * payloadExpl.getValue(),
"btq, product of:", expl, payloadExpl);
} else {
result.addDetail(payloadExpl);
result.setValue(payloadExpl.getValue());
result.setDescription("btq(includeSpanScore=false), result of:");
return Explanation.match(payloadExpl.getValue(), "btq(includeSpanScore=false), result of:", payloadExpl);
}
result.setMatch(true); // LUCENE-1303
return result;
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -56,7 +56,7 @@ public abstract class AfterEffect {
@Override
public final Explanation explain(BasicStats stats, float tfn) {
return new Explanation(1, "no aftereffect");
return Explanation.match(1, "no aftereffect");
}
@Override

View File

@ -37,13 +37,12 @@ public class AfterEffectB extends AfterEffect {
@Override
public final Explanation explain(BasicStats stats, float tfn) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(score(stats, tfn));
result.addDetail(new Explanation(tfn, "tfn"));
result.addDetail(new Explanation(stats.getTotalTermFreq(), "totalTermFreq"));
result.addDetail(new Explanation(stats.getDocFreq(), "docFreq"));
return result;
return Explanation.match(
score(stats, tfn),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(tfn, "tfn"),
Explanation.match(stats.getTotalTermFreq(), "totalTermFreq"),
Explanation.match(stats.getDocFreq(), "docFreq"));
}
@Override

View File

@ -35,11 +35,10 @@ public class AfterEffectL extends AfterEffect {
@Override
public final Explanation explain(BasicStats stats, float tfn) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(score(stats, tfn));
result.addDetail(new Explanation(tfn, "tfn"));
return result;
return Explanation.match(
score(stats, tfn),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(tfn, "tfn"));
}
@Override

View File

@ -18,9 +18,11 @@ package org.apache.lucene.search.similarities;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
@ -166,7 +168,7 @@ public class BM25Similarity extends Similarity {
final long df = termStats.docFreq();
final long max = collectionStats.maxDoc();
final float idf = idf(df, max);
return new Explanation(idf, "idf(docFreq=" + df + ", maxDocs=" + max + ")");
return Explanation.match(idf, "idf(docFreq=" + df + ", maxDocs=" + max + ")");
}
/**
@ -185,16 +187,14 @@ public class BM25Similarity extends Similarity {
public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
final long max = collectionStats.maxDoc();
float idf = 0.0f;
final Explanation exp = new Explanation();
exp.setDescription("idf(), sum of:");
List<Explanation> details = new ArrayList<>();
for (final TermStatistics stat : termStats ) {
final long df = stat.docFreq();
final float termIdf = idf(df, max);
exp.addDetail(new Explanation(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")"));
details.add(Explanation.match(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")"));
idf += termIdf;
}
exp.setValue(idf);
return exp;
return Explanation.match(idf, "idf(), sum of:", details);
}
@Override
@ -292,34 +292,38 @@ public class BM25Similarity extends Similarity {
this.weight = idf.getValue() * queryBoost * topLevelBoost;
}
}
private Explanation explainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
Explanation result = new Explanation();
result.setDescription("score(doc="+doc+",freq="+freq+"), product of:");
Explanation boostExpl = new Explanation(stats.queryBoost * stats.topLevelBoost, "boost");
if (boostExpl.getValue() != 1.0f)
result.addDetail(boostExpl);
result.addDetail(stats.idf);
Explanation tfNormExpl = new Explanation();
tfNormExpl.setDescription("tfNorm, computed from:");
tfNormExpl.addDetail(freq);
tfNormExpl.addDetail(new Explanation(k1, "parameter k1"));
private Explanation explainTFNorm(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
List<Explanation> subs = new ArrayList<>();
subs.add(freq);
subs.add(Explanation.match(k1, "parameter k1"));
if (norms == null) {
tfNormExpl.addDetail(new Explanation(0, "parameter b (norms omitted for field)"));
tfNormExpl.setValue((freq.getValue() * (k1 + 1)) / (freq.getValue() + k1));
subs.add(Explanation.match(0, "parameter b (norms omitted for field)"));
return Explanation.match(
(freq.getValue() * (k1 + 1)) / (freq.getValue() + k1),
"parameter b (norms omitted for field)", subs);
} else {
float doclen = decodeNormValue((byte)norms.get(doc));
tfNormExpl.addDetail(new Explanation(b, "parameter b"));
tfNormExpl.addDetail(new Explanation(stats.avgdl, "avgFieldLength"));
tfNormExpl.addDetail(new Explanation(doclen, "fieldLength"));
tfNormExpl.setValue((freq.getValue() * (k1 + 1)) / (freq.getValue() + k1 * (1 - b + b * doclen/stats.avgdl)));
subs.add(Explanation.match(b, "parameter b"));
subs.add(Explanation.match(stats.avgdl, "avgFieldLength"));
subs.add(Explanation.match(doclen, "fieldLength"));
return Explanation.match(
(freq.getValue() * (k1 + 1)) / (freq.getValue() + k1 * (1 - b + b * doclen/stats.avgdl)),
"tfNorm, computed from:", subs);
}
result.addDetail(tfNormExpl);
result.setValue(boostExpl.getValue() * stats.idf.getValue() * tfNormExpl.getValue());
return result;
}
private Explanation explainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
Explanation boostExpl = Explanation.match(stats.queryBoost * stats.topLevelBoost, "boost");
List<Explanation> subs = new ArrayList<>();
if (boostExpl.getValue() != 1.0f)
subs.add(boostExpl);
subs.add(stats.idf);
Explanation tfNormExpl = explainTFNorm(doc, freq, stats, norms);
subs.add(tfNormExpl);
return Explanation.match(
boostExpl.getValue() * stats.idf.getValue() * tfNormExpl.getValue(),
"score(doc="+doc+",freq="+freq+"), product of:", subs);
}
@Override

View File

@ -47,15 +47,11 @@ public abstract class BasicModel {
* override this method.</p>
*/
public Explanation explain(BasicStats stats, float tfn) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(score(stats, tfn));
result.addDetail(new Explanation(tfn, "tfn"));
result.addDetail(
new Explanation(stats.getNumberOfDocuments(), "numberOfDocuments"));
result.addDetail(
new Explanation(stats.getTotalTermFreq(), "totalTermFreq"));
return result;
return Explanation.match(
score(stats, tfn),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(tfn, "tfn"),
Explanation.match(stats.getNumberOfDocuments(), "numberOfDocuments"),
Explanation.match(stats.getTotalTermFreq(), "totalTermFreq"));
}
/**

View File

@ -38,15 +38,11 @@ public class BasicModelIn extends BasicModel {
@Override
public final Explanation explain(BasicStats stats, float tfn) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(score(stats, tfn));
result.addDetail(new Explanation(tfn, "tfn"));
result.addDetail(
new Explanation(stats.getNumberOfDocuments(), "numberOfDocuments"));
result.addDetail(
new Explanation(stats.getDocFreq(), "docFreq"));
return result;
return Explanation.match(
score(stats, tfn),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(tfn, "tfn"),
Explanation.match(stats.getNumberOfDocuments(), "numberOfDocuments"),
Explanation.match(stats.getDocFreq(), "docFreq"));
}
@Override

View File

@ -17,6 +17,8 @@ package org.apache.lucene.search.similarities;
* limitations under the License.
*/
import java.util.List;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.similarities.AfterEffect.NoAfterEffect;
import org.apache.lucene.search.similarities.Normalization.NoNormalization;
@ -112,17 +114,17 @@ public class DFRSimilarity extends SimilarityBase {
}
@Override
protected void explain(Explanation expl,
protected void explain(List<Explanation> subs,
BasicStats stats, int doc, float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
expl.addDetail(new Explanation(stats.getTotalBoost(), "boost"));
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
}
Explanation normExpl = normalization.explain(stats, freq, docLen);
float tfn = normExpl.getValue();
expl.addDetail(normExpl);
expl.addDetail(basicModel.explain(stats, tfn));
expl.addDetail(afterEffect.explain(stats, tfn));
subs.add(normExpl);
subs.add(basicModel.explain(stats, tfn));
subs.add(afterEffect.explain(stats, tfn));
}
@Override

View File

@ -39,7 +39,7 @@ public abstract class Distribution {
/** Explains the score. Returns the name of the model only, since
* both {@code tfn} and {@code lambda} are explained elsewhere. */
public Explanation explain(BasicStats stats, float tfn, float lambda) {
return new Explanation(
return Explanation.match(
score(stats, tfn, lambda), getClass().getSimpleName());
}

View File

@ -17,6 +17,8 @@ package org.apache.lucene.search.similarities;
* limitations under the License.
*/
import java.util.List;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.similarities.Normalization.NoNormalization;
@ -103,16 +105,15 @@ public class IBSimilarity extends SimilarityBase {
@Override
protected void explain(
Explanation expl, BasicStats stats, int doc, float freq, float docLen) {
List<Explanation> subs, BasicStats stats, int doc, float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
expl.addDetail(new Explanation(stats.getTotalBoost(), "boost"));
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
}
Explanation normExpl = normalization.explain(stats, freq, docLen);
Explanation lambdaExpl = lambda.explain(stats);
expl.addDetail(normExpl);
expl.addDetail(lambdaExpl);
expl.addDetail(distribution.explain(
stats, normExpl.getValue(), lambdaExpl.getValue()));
subs.add(normExpl);
subs.add(lambdaExpl);
subs.add(distribution.explain(stats, normExpl.getValue(), lambdaExpl.getValue()));
}
/**

View File

@ -17,6 +17,7 @@ package org.apache.lucene.search.similarities;
* limitations under the License.
*/
import java.util.List;
import java.util.Locale;
import org.apache.lucene.search.Explanation;
@ -70,21 +71,21 @@ public class LMDirichletSimilarity extends LMSimilarity {
}
@Override
protected void explain(Explanation expl, BasicStats stats, int doc,
protected void explain(List<Explanation> subs, BasicStats stats, int doc,
float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
expl.addDetail(new Explanation(stats.getTotalBoost(), "boost"));
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
}
expl.addDetail(new Explanation(mu, "mu"));
Explanation weightExpl = new Explanation();
weightExpl.setValue((float)Math.log(1 + freq /
(mu * ((LMStats)stats).getCollectionProbability())));
weightExpl.setDescription("term weight");
expl.addDetail(weightExpl);
expl.addDetail(new Explanation(
subs.add(Explanation.match(mu, "mu"));
Explanation weightExpl = Explanation.match(
(float)Math.log(1 + freq /
(mu * ((LMStats)stats).getCollectionProbability())),
"term weight");
subs.add(weightExpl);
subs.add(Explanation.match(
(float)Math.log(mu / (docLen + mu)), "document norm"));
super.explain(expl, stats, doc, freq, docLen);
super.explain(subs, stats, doc, freq, docLen);
}
/** Returns the &mu; parameter. */

View File

@ -17,6 +17,7 @@ package org.apache.lucene.search.similarities;
* limitations under the License.
*/
import java.util.List;
import java.util.Locale;
import org.apache.lucene.search.Explanation;
@ -58,13 +59,13 @@ public class LMJelinekMercerSimilarity extends LMSimilarity {
}
@Override
protected void explain(Explanation expl, BasicStats stats, int doc,
protected void explain(List<Explanation> subs, BasicStats stats, int doc,
float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
expl.addDetail(new Explanation(stats.getTotalBoost(), "boost"));
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
}
expl.addDetail(new Explanation(lambda, "lambda"));
super.explain(expl, stats, doc, freq, docLen);
subs.add(Explanation.match(lambda, "lambda"));
super.explain(subs, stats, doc, freq, docLen);
}
/** Returns the &lambda; parameter. */

View File

@ -17,6 +17,7 @@ package org.apache.lucene.search.similarities;
* limitations under the License.
*/
import java.util.List;
import java.util.Locale;
import org.apache.lucene.search.CollectionStatistics;
@ -69,9 +70,9 @@ public abstract class LMSimilarity extends SimilarityBase {
}
@Override
protected void explain(Explanation expl, BasicStats stats, int doc,
protected void explain(List<Explanation> subExpls, BasicStats stats, int doc,
float freq, float docLen) {
expl.addDetail(new Explanation(collectionModel.computeProbability(stats),
subExpls.add(Explanation.match(collectionModel.computeProbability(stats),
"collection probability"));
}

View File

@ -35,14 +35,11 @@ public class LambdaDF extends Lambda {
@Override
public final Explanation explain(BasicStats stats) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(lambda(stats));
result.addDetail(
new Explanation(stats.getDocFreq(), "docFreq"));
result.addDetail(
new Explanation(stats.getNumberOfDocuments(), "numberOfDocuments"));
return result;
return Explanation.match(
lambda(stats),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(stats.getDocFreq(), "docFreq"),
Explanation.match(stats.getNumberOfDocuments(), "numberOfDocuments"));
}
@Override

View File

@ -35,14 +35,11 @@ public class LambdaTTF extends Lambda {
@Override
public final Explanation explain(BasicStats stats) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(lambda(stats));
result.addDetail(
new Explanation(stats.getTotalTermFreq(), "totalTermFreq"));
result.addDetail(
new Explanation(stats.getNumberOfDocuments(), "numberOfDocuments"));
return result;
return Explanation.match(
lambda(stats),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(stats.getTotalTermFreq(), "totalTermFreq"),
Explanation.match(stats.getNumberOfDocuments(), "numberOfDocuments"));
}
@Override

View File

@ -18,9 +18,11 @@ package org.apache.lucene.search.similarities;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.TermStatistics;
@ -83,11 +85,11 @@ public class MultiSimilarity extends Similarity {
@Override
public Explanation explain(int doc, Explanation freq) {
Explanation expl = new Explanation(score(doc, freq.getValue()), "sum of:");
List<Explanation> subs = new ArrayList<>();
for (SimScorer subScorer : subScorers) {
expl.addDetail(subScorer.explain(doc, freq));
subs.add(subScorer.explain(doc, freq));
}
return expl;
return Explanation.match(score(doc, freq.getValue()), "sum of:", subs);
}
@Override

View File

@ -45,14 +45,12 @@ public abstract class Normalization {
* Subclasses that use other statistics must override this method.</p>
*/
public Explanation explain(BasicStats stats, float tf, float len) {
Explanation result = new Explanation();
result.setDescription(getClass().getSimpleName() + ", computed from: ");
result.setValue(tfn(stats, tf, len));
result.addDetail(new Explanation(tf, "tf"));
result.addDetail(
new Explanation(stats.getAvgFieldLength(), "avgFieldLength"));
result.addDetail(new Explanation(len, "len"));
return result;
return Explanation.match(
tfn(stats, tf, len),
getClass().getSimpleName() + ", computed from: ",
Explanation.match(tf, "tf"),
Explanation.match(stats.getAvgFieldLength(), "avgFieldLength"),
Explanation.match(len, "len"));
}
/** Implementation used when there is no normalization. */
@ -68,7 +66,7 @@ public abstract class Normalization {
@Override
public final Explanation explain(BasicStats stats, float tf, float len) {
return new Explanation(1, "no normalization");
return Explanation.match(1, "no normalization");
}
@Override

View File

@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SmallFloat;
import java.io.IOException;
import java.util.Collections;
/**
* Similarity defines the components of Lucene scoring.
@ -210,10 +211,10 @@ public abstract class Similarity {
* @return document's score
*/
public Explanation explain(int doc, Explanation freq) {
Explanation result = new Explanation(score(doc, freq.getValue()),
"score(doc=" + doc + ",freq=" + freq.getValue() +"), with freq of:");
result.addDetail(freq);
return result;
return Explanation.match(
score(doc, freq.getValue()),
"score(doc=" + doc + ",freq=" + freq.getValue() +"), with freq of:",
Collections.singleton(freq));
}
}

View File

@ -18,9 +18,11 @@ package org.apache.lucene.search.similarities;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
@ -32,7 +34,7 @@ import org.apache.lucene.util.SmallFloat;
* A subclass of {@code Similarity} that provides a simplified API for its
* descendants. Subclasses are only required to implement the {@link #score}
* and {@link #toString()} methods. Implementing
* {@link #explain(Explanation, BasicStats, int, float, float)} is optional,
* {@link #explain(List, BasicStats, int, float, float)} is optional,
* inasmuch as SimilarityBase already provides a basic explanation of the score
* and the term frequency. However, implementers of a subclass are encouraged to
* include as much detail about the scoring method as possible.
@ -152,14 +154,14 @@ public abstract class SimilarityBase extends Similarity {
* clauses to explain details of their scoring formulae.
* <p>The default implementation does nothing.</p>
*
* @param expl the explanation to extend with details.
* @param subExpls the list of details of the explanation to extend
* @param stats the corpus level statistics.
* @param doc the document id.
* @param freq the term frequency.
* @param docLen the document length.
*/
protected void explain(
Explanation expl, BasicStats stats, int doc, float freq, float docLen) {}
List<Explanation> subExpls, BasicStats stats, int doc, float freq, float docLen) {}
/**
* Explains the score. The implementation here provides a basic explanation
@ -168,7 +170,7 @@ public abstract class SimilarityBase extends Similarity {
* attaches the score (computed via the {@link #score(BasicStats, float, float)}
* method) and the explanation for the term frequency. Subclasses content with
* this format may add additional details in
* {@link #explain(Explanation, BasicStats, int, float, float)}.
* {@link #explain(List, BasicStats, int, float, float)}.
*
* @param stats the corpus level statistics.
* @param doc the document id.
@ -178,15 +180,13 @@ public abstract class SimilarityBase extends Similarity {
*/
protected Explanation explain(
BasicStats stats, int doc, Explanation freq, float docLen) {
Explanation result = new Explanation();
result.setValue(score(stats, freq.getValue(), docLen));
result.setDescription("score(" + getClass().getSimpleName() +
", doc=" + doc + ", freq=" + freq.getValue() +"), computed from:");
result.addDetail(freq);
List<Explanation> subs = new ArrayList<>();
explain(subs, stats, doc, freq.getValue(), docLen);
explain(result, stats, doc, freq.getValue(), docLen);
return result;
return Explanation.match(
score(stats, freq.getValue(), docLen),
"score(" + getClass().getSimpleName() + ", doc=" + doc + ", freq=" + freq.getValue() +"), computed from:",
subs);
}
@Override

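As an illustration of the new extension point above, a hypothetical SimilarityBase subclass (not part of this commit; class name, boost constant and formula are invented) that contributes its own details through the List<Explanation> hook:

package org.apache.lucene.search.similarities;

import java.util.List;

import org.apache.lucene.search.Explanation;

// Hypothetical example only: shows how a subclass feeds details into the new hook.
public class ConstantBoostSimilarity extends SimilarityBase {
  private final float boost = 2.0f; // invented parameter

  @Override
  protected float score(BasicStats stats, float freq, float docLen) {
    return boost * freq; // toy scoring formula for illustration
  }

  @Override
  protected void explain(List<Explanation> subExpls, BasicStats stats,
                         int doc, float freq, float docLen) {
    // Details are now appended to the provided list instead of a mutable Explanation.
    subExpls.add(Explanation.match(boost, "boost"));
    subExpls.add(Explanation.match(freq, "freq"));
  }

  @Override
  public String toString() {
    return "ConstantBoost";
  }
}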
View File

@ -18,9 +18,11 @@ package org.apache.lucene.search.similarities;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.Explanation;
@ -582,7 +584,7 @@ public abstract class TFIDFSimilarity extends Similarity {
final long df = termStats.docFreq();
final long max = collectionStats.maxDoc();
final float idf = idf(df, max);
return new Explanation(idf, "idf(docFreq=" + df + ", maxDocs=" + max + ")");
return Explanation.match(idf, "idf(docFreq=" + df + ", maxDocs=" + max + ")");
}
/**
@ -601,16 +603,14 @@ public abstract class TFIDFSimilarity extends Similarity {
public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
final long max = collectionStats.maxDoc();
float idf = 0.0f;
final Explanation exp = new Explanation();
exp.setDescription("idf(), sum of:");
List<Explanation> subs = new ArrayList<>();
for (final TermStatistics stat : termStats ) {
final long df = stat.docFreq();
final float termIdf = idf(df, max);
exp.addDetail(new Explanation(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")"));
subs.add(Explanation.match(termIdf, "idf(docFreq=" + df + ", maxDocs=" + max + ")"));
idf += termIdf;
}
exp.setValue(idf);
return exp;
return Explanation.match(idf, "idf(), sum of:", subs);
}
/** Computes a score factor based on a term's document frequency (the number
@ -764,58 +764,43 @@ public abstract class TFIDFSimilarity extends Similarity {
}
}
private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
Explanation result = new Explanation();
result.setDescription("score(doc="+doc+",freq="+freq.getValue()+"), product of:");
private Explanation explainQuery(IDFStats stats) {
List<Explanation> subs = new ArrayList<>();
// explain query weight
Explanation queryExpl = new Explanation();
queryExpl.setDescription("queryWeight, product of:");
Explanation boostExpl = new Explanation(stats.queryBoost, "boost");
Explanation boostExpl = Explanation.match(stats.queryBoost, "boost");
if (stats.queryBoost != 1.0f)
queryExpl.addDetail(boostExpl);
queryExpl.addDetail(stats.idf);
subs.add(boostExpl);
subs.add(stats.idf);
Explanation queryNormExpl = new Explanation(stats.queryNorm,"queryNorm");
queryExpl.addDetail(queryNormExpl);
Explanation queryNormExpl = Explanation.match(stats.queryNorm,"queryNorm");
subs.add(queryNormExpl);
queryExpl.setValue(boostExpl.getValue() *
stats.idf.getValue() *
queryNormExpl.getValue());
return Explanation.match(
boostExpl.getValue() * stats.idf.getValue() * queryNormExpl.getValue(),
"queryWeight, product of:", subs);
}
result.addDetail(queryExpl);
private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
Explanation fieldNormExpl = Explanation.match(
norms != null ? decodeNormValue(norms.get(doc)) : 1.0f,
"fieldNorm(doc=" + doc + ")");
// explain field weight
Explanation fieldExpl = new Explanation();
fieldExpl.setDescription("fieldWeight in "+doc+
", product of:");
return Explanation.match(
tfExplanation.getValue() * stats.idf.getValue() * fieldNormExpl.getValue(),
"fieldWeight in " + doc + ", product of:",
tfExplanation, stats.idf, fieldNormExpl);
}
Explanation tfExplanation = new Explanation();
tfExplanation.setValue(tf(freq.getValue()));
tfExplanation.setDescription("tf(freq="+freq.getValue()+"), with freq of:");
tfExplanation.addDetail(freq);
fieldExpl.addDetail(tfExplanation);
fieldExpl.addDetail(stats.idf);
Explanation fieldNormExpl = new Explanation();
float fieldNorm = norms != null ? decodeNormValue(norms.get(doc)) : 1.0f;
fieldNormExpl.setValue(fieldNorm);
fieldNormExpl.setDescription("fieldNorm(doc="+doc+")");
fieldExpl.addDetail(fieldNormExpl);
fieldExpl.setValue(tfExplanation.getValue() *
stats.idf.getValue() *
fieldNormExpl.getValue());
result.addDetail(fieldExpl);
// combine them
result.setValue(queryExpl.getValue() * fieldExpl.getValue());
if (queryExpl.getValue() == 1.0f)
private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
Explanation queryExpl = explainQuery(stats);
Explanation fieldExpl = explainField(doc, freq, stats, norms);
if (queryExpl.getValue() == 1f) {
return fieldExpl;
return result;
}
return Explanation.match(
queryExpl.getValue() * fieldExpl.getValue(),
"score(doc="+doc+",freq="+freq.getValue()+"), product of:",
queryExpl, fieldExpl);
}
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
@ -110,16 +109,14 @@ public class SpanWeight extends Weight {
if (newDoc == doc) {
float freq = scorer.sloppyFreq();
SimScorer docScorer = similarity.simScorer(stats, context);
ComplexExplanation result = new ComplexExplanation();
result.setDescription("weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:");
Explanation scoreExplanation = docScorer.explain(doc, new Explanation(freq, "phraseFreq=" + freq));
result.addDetail(scoreExplanation);
result.setValue(scoreExplanation.getValue());
result.setMatch(true);
return result;
Explanation freqExplanation = Explanation.match(freq, "phraseFreq=" + freq);
Explanation scoreExplanation = docScorer.explain(doc, freqExplanation);
return Explanation.match(scoreExplanation.getValue(),
"weight("+getQuery()+" in "+doc+") [" + similarity.getClass().getSimpleName() + "], result of:",
scoreExplanation);
}
}
return new ComplexExplanation(false, 0.0f, "no matching term");
return Explanation.noMatch("no matching term");
}
}

View File

@ -56,7 +56,7 @@ public class TestOmitTf extends LuceneTestCase {
@Override public float sloppyFreq(int distance) { return 2.0f; }
@Override public float idf(long docFreq, long numDocs) { return 1.0f; }
@Override public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats) {
return new Explanation(1.0f, "Inexplicable");
return Explanation.match(1.0f, "Inexplicable");
}
@Override public float scorePayload(int doc, int start, int end, BytesRef payload) { return 1.0f; }
}

View File

@ -180,12 +180,11 @@ public class TestDocValuesScoring extends LuceneTestCase {
@Override
public Explanation explain(int doc, Explanation freq) {
Explanation boostExplanation = new Explanation(Float.intBitsToFloat((int)values.get(doc)), "indexDocValue(" + boostField + ")");
Explanation boostExplanation = Explanation.match(Float.intBitsToFloat((int)values.get(doc)), "indexDocValue(" + boostField + ")");
Explanation simExplanation = sub.explain(doc, freq);
Explanation expl = new Explanation(boostExplanation.getValue() * simExplanation.getValue(), "product of:");
expl.addDetail(boostExplanation);
expl.addDetail(simExplanation);
return expl;
return Explanation.match(
boostExplanation.getValue() * simExplanation.getValue(),
"product of:", boostExplanation, simExplanation);
}
};
}

View File

@ -337,7 +337,7 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
searcher.setSimilarity(new DefaultSimilarity() {
@Override
public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
return new Explanation(10f, "just a test");
return Explanation.match(10f, "just a test");
}
});

View File

@ -272,7 +272,6 @@ public class TestQueryRescorer extends LuceneTestCase {
assertTrue(s.contains("first pass score"));
assertTrue(s.contains("no second pass score"));
assertFalse(s.contains("= second pass score"));
assertTrue(s.contains("NON-MATCH"));
assertEquals(hits2.scoreDocs[1].score, explain.getValue(), 0.0f);
r.close();

View File

@ -48,7 +48,7 @@ public class TestSimilarity extends LuceneTestCase {
@Override public float sloppyFreq(int distance) { return 2.0f; }
@Override public float idf(long docFreq, long numDocs) { return 1.0f; }
@Override public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics[] stats) {
return new Explanation(1.0f, "Inexplicable");
return Explanation.match(1.0f, "Inexplicable");
}
}

View File

@ -101,7 +101,7 @@ public class TestSortRescorer extends LuceneTestCase {
// Confirm the explanation breaks out the individual
// sort fields:
assertTrue(expl.contains("= sort field <int: \"popularity\">! value=20"));
assertTrue(expl, expl.contains("= sort field <int: \"popularity\">! value=20"));
// Confirm the explanation includes first pass details:
assertTrue(expl.contains("= first pass score"));

View File

@ -341,7 +341,7 @@ public class TestPayloadNearQuery extends LuceneTestCase {
// idf used for phrase queries
@Override
public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics[] termStats) {
return new Explanation(1.0f, "Inexplicable");
return Explanation.match(1.0f, "Inexplicable");
}
}
}

View File

@ -194,7 +194,7 @@ public class TestSimilarityBase extends LuceneTestCase {
toTermStats(stats));
float score = sim.score(realStats, freq, docLen);
float explScore = sim.explain(
realStats, 1, new Explanation(freq, "freq"), docLen).getValue();
realStats, 1, Explanation.match(freq, "freq"), docLen).getValue();
assertFalse("Score infinite: " + sim.toString(), Float.isInfinite(score));
assertFalse("Score NaN: " + sim.toString(), Float.isNaN(score));
assertTrue("Score negative: " + sim.toString(), score >= 0);

View File

@ -18,6 +18,8 @@ package org.apache.lucene.expressions;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -57,7 +59,7 @@ class ExpressionRescorer extends SortRescorer {
@Override
public Explanation explain(IndexSearcher searcher, Explanation firstPassExplanation, int docID) throws IOException {
Explanation result = super.explain(searcher, firstPassExplanation, docID);
Explanation superExpl = super.explain(searcher, firstPassExplanation, docID);
List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
int subReader = ReaderUtil.subIndex(docID, leaves);
@ -71,11 +73,12 @@ class ExpressionRescorer extends SortRescorer {
context.put("scorer", fakeScorer);
List<Explanation> subs = new ArrayList<>(Arrays.asList(superExpl.getDetails()));
for(String variable : expression.variables) {
result.addDetail(new Explanation((float) bindings.getValueSource(variable).getValues(context, readerContext).doubleVal(docIDInSegment),
subs.add(Explanation.match((float) bindings.getValueSource(variable).getValues(context, readerContext).doubleVal(docIDInSegment),
"variable \"" + variable + "\""));
}
return result;
return Explanation.match(superExpl.getValue(), superExpl.getDescription(), subs);
}
}
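
The rewrite above shows the idiom for decorating an explanation now that it is immutable: copy getDetails() into a list, append, and rebuild with the same value and description. A generic sketch of that idiom, assuming the base explanation is a match (the helper name and extraDetail parameter are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.lucene.search.Explanation;

class ExplanationRebuildExample {
  // Return a copy of base with one extra detail appended; base itself is left untouched.
  static Explanation withExtraDetail(Explanation base, Explanation extraDetail) {
    List<Explanation> details = new ArrayList<>(Arrays.asList(base.getDetails()));
    details.add(extraDetail);
    return Explanation.match(base.getValue(), base.getDescription(), details);
  }
}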

View File

@ -17,13 +17,15 @@ package org.apache.lucene.search.join;
* limitations under the License.
*/
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -36,9 +38,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
import org.apache.lucene.util.LongValues;
import java.io.IOException;
import java.util.Set;
final class GlobalOrdinalsQuery extends Query {
// All the ords of matching docs found with OrdinalsCollector.
@ -121,10 +120,10 @@ final class GlobalOrdinalsQuery extends Query {
int segmentOrd = values.getOrd(doc);
if (segmentOrd != -1) {
BytesRef joinValue = values.lookupOrd(segmentOrd);
return new ComplexExplanation(true, queryNorm, "Score based on join value " + joinValue.utf8ToString());
return Explanation.match(queryNorm, "Score based on join value " + joinValue.utf8ToString());
}
}
return new ComplexExplanation(false, 0.0f, "Not a match");
return Explanation.noMatch("Not a match");
}
@Override

View File

@ -17,13 +17,15 @@ package org.apache.lucene.search.join;
* limitations under the License.
*/
import java.io.IOException;
import java.util.Set;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -35,9 +37,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
import java.io.IOException;
import java.util.Set;
final class GlobalOrdinalsWithScoreQuery extends Query {
private final GlobalOrdinalsWithScoreCollector collector;
@ -126,10 +125,10 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
score = collector.score(segmentOrd);
}
BytesRef joinValue = values.lookupOrd(segmentOrd);
return new ComplexExplanation(true, score, "Score based on join value " + joinValue.utf8ToString());
return Explanation.match(score, "Score based on join value " + joinValue.utf8ToString());
}
}
return new ComplexExplanation(false, 0.0f, "Not a match");
return Explanation.noMatch("Not a match");
}
@Override

View File

@ -27,7 +27,6 @@ import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -137,12 +136,12 @@ class TermsIncludingScoreQuery extends Query {
postingsEnum = segmentTermsEnum.postings(null, postingsEnum, PostingsEnum.NONE);
if (postingsEnum.advance(doc) == doc) {
final float score = TermsIncludingScoreQuery.this.scores[ords[i]];
return new ComplexExplanation(true, score, "Score based on join value " + segmentTermsEnum.term().utf8ToString());
return Explanation.match(score, "Score based on join value " + segmentTermsEnum.term().utf8ToString());
}
}
}
}
return new ComplexExplanation(false, 0.0f, "Not a match");
return Explanation.noMatch("Not a match");
}
@Override

View File

@ -17,11 +17,16 @@ package org.apache.lucene.search.join;
* limitations under the License.
*/
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@ -34,12 +39,6 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;
/**
* This query requires that you index
* children and parent docs as a single block, using the
@ -190,7 +189,7 @@ public class ToParentBlockJoinQuery extends Query {
if (scorer != null && scorer.advance(doc) == doc) {
return scorer.explain(context.docBase);
}
return new ComplexExplanation(false, 0.0f, "Not a match");
return Explanation.noMatch("Not a match");
}
}
@ -414,8 +413,7 @@ public class ToParentBlockJoinQuery extends Query {
public Explanation explain(int docBase) throws IOException {
int start = docBase + prevParentDoc + 1; // +1 b/c prevParentDoc is previous parent doc
int end = docBase + parentDoc - 1; // -1 b/c parentDoc is parent doc
return new ComplexExplanation(
true, score(), String.format(Locale.ROOT, "Score based on child doc range from %d to %d", start, end)
return Explanation.match(score(), String.format(Locale.ROOT, "Score based on child doc range from %d to %d", start, end)
);
}

View File

@ -454,7 +454,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
@Override
public Explanation explain(int doc, Explanation freq) {
return new Explanation(Float.intBitsToFloat((int) values.get(doc)),
return Explanation.match(Float.intBitsToFloat((int) values.get(doc)),
"indexDocValue(" + scoreValueField + ")");
}
};

View File

@ -18,9 +18,11 @@ package org.apache.lucene.queries;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader; // for javadocs
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.search.Explanation;
@ -130,12 +132,13 @@ public class CustomScoreProvider {
for (Explanation valSrcExpl : valSrcExpls) {
valSrcScore *= valSrcExpl.getValue();
}
Explanation exp = new Explanation( valSrcScore * subQueryExpl.getValue(), "custom score: product of:");
exp.addDetail(subQueryExpl);
List<Explanation> subs = new ArrayList<>();
subs.add(subQueryExpl);
for (Explanation valSrcExpl : valSrcExpls) {
exp.addDetail(valSrcExpl);
subs.add(valSrcExpl);
}
return exp;
return Explanation.match(valSrcScore * subQueryExpl.getValue(), "custom score: product of:", subs);
}
/**
@ -154,10 +157,7 @@ public class CustomScoreProvider {
if (valSrcExpl != null) {
valSrcScore *= valSrcExpl.getValue();
}
Explanation exp = new Explanation( valSrcScore * subQueryExpl.getValue(), "custom score: product of:");
exp.addDetail(subQueryExpl);
exp.addDetail(valSrcExpl);
return exp;
return Explanation.match(valSrcScore * subQueryExpl.getValue(), "custom score: product of:", subQueryExpl, valSrcExpl);
}
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
@ -244,7 +243,7 @@ public class CustomScoreQuery extends Query {
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
Explanation explain = doExplain(context, doc);
return explain == null ? new Explanation(0.0f, "no matching docs") : explain;
return explain == null ? Explanation.noMatch("no matching docs") : explain;
}
private Explanation doExplain(LeafReaderContext info, int doc) throws IOException {
@ -259,11 +258,9 @@ public class CustomScoreQuery extends Query {
}
Explanation customExp = CustomScoreQuery.this.getCustomScoreProvider(info).customExplain(doc,subQueryExpl,valSrcExpls);
float sc = queryWeight * customExp.getValue();
Explanation res = new ComplexExplanation(
true, sc, CustomScoreQuery.this.toString() + ", product of:");
res.addDetail(customExp);
res.addDetail(new Explanation(queryWeight, "queryWeight"));
return res;
return Explanation.match(
sc, CustomScoreQuery.this.toString() + ", product of:",
customExp, Explanation.match(queryWeight, "queryWeight"));
}
}

View File

@ -34,7 +34,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
@ -377,19 +376,12 @@ public class TermsQuery extends Query implements Accountable {
final Scorer s = scorer(context, context.reader().getLiveDocs());
final boolean exists = (s != null && s.advance(doc) == doc);
final ComplexExplanation result = new ComplexExplanation();
if (exists) {
result.setDescription(TermsQuery.this.toString() + ", product of:");
result.setValue(queryWeight);
result.setMatch(Boolean.TRUE);
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(queryNorm, "queryNorm"));
return Explanation.match(queryWeight, TermsQuery.this.toString() + ", product of:",
Explanation.match(getBoost(), "boost"), Explanation.match(queryNorm, "queryNorm"));
} else {
result.setDescription(TermsQuery.this.toString() + " doesn't match id " + doc);
result.setValue(0);
result.setMatch(Boolean.FALSE);
return Explanation.noMatch(TermsQuery.this.toString() + " doesn't match id " + doc);
}
return result;
}
@Override

View File

@ -26,7 +26,6 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.IndexSearcher;
@ -115,11 +114,7 @@ public class BoostedQuery extends Query {
}
FunctionValues vals = boostVal.getValues(fcontext, readerContext);
float sc = subQueryExpl.getValue() * vals.floatVal(doc);
Explanation res = new ComplexExplanation(
true, sc, BoostedQuery.this.toString() + ", product of:");
res.addDetail(subQueryExpl);
res.addDetail(vals.explain(doc));
return res;
return Explanation.match(sc, BoostedQuery.this.toString() + ", product of:", subQueryExpl, vals.explain(doc));
}
}
@ -160,11 +155,7 @@ public class BoostedQuery extends Query {
return subQueryExpl;
}
float sc = subQueryExpl.getValue() * vals.floatVal(doc);
Explanation res = new ComplexExplanation(
true, sc, BoostedQuery.this.toString() + ", product of:");
res.addDetail(subQueryExpl);
res.addDetail(vals.explain(doc));
return res;
return Explanation.match(sc, BoostedQuery.this.toString() + ", product of:", subQueryExpl, vals.explain(doc));
}
}

View File

@ -24,7 +24,6 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -170,13 +169,10 @@ public class FunctionQuery extends Query {
public Explanation explain(int doc) throws IOException {
float sc = qWeight * vals.floatVal(doc);
Explanation result = new ComplexExplanation
(true, sc, "FunctionQuery(" + func + "), product of:");
result.addDetail(vals.explain(doc));
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(weight.queryNorm,"queryNorm"));
return result;
return Explanation.match(sc, "FunctionQuery(" + func + "), product of:",
vals.explain(doc),
Explanation.match(getBoost(), "boost"),
Explanation.match(weight.queryNorm, "queryNorm"));
}
}

View File

@ -133,7 +133,7 @@ public abstract class FunctionValues {
public void strVal(int doc, String [] vals) { throw new UnsupportedOperationException(); }
public Explanation explain(int doc) {
return new Explanation(floatVal(doc), toString(doc));
return Explanation.match(floatVal(doc), toString(doc));
}
public ValueSourceScorer getScorer(IndexReader reader) {

View File

@ -17,7 +17,18 @@ package org.apache.lucene.queries;
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.function.FunctionQuery;
import org.apache.lucene.queries.function.FunctionTestSetup;
import org.apache.lucene.queries.function.ValueSource;
@ -33,15 +44,6 @@ import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.TopDocs;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.Term;
/**
* Test CustomScoreQuery search.
@ -94,13 +96,13 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
@Override
public Explanation customExplain(int doc, Explanation subQueryExpl, Explanation valSrcExpl) {
float valSrcScore = valSrcExpl == null ? 0 : valSrcExpl.getValue();
Explanation exp = new Explanation(valSrcScore + subQueryExpl.getValue(), "custom score: sum of:");
exp.addDetail(subQueryExpl);
List<Explanation> subs = new ArrayList<>();
subs.add(subQueryExpl);
if (valSrcExpl != null) {
exp.addDetail(valSrcExpl);
subs.add(valSrcExpl);
}
return exp;
float valSrcScore = valSrcExpl == null ? 0 : valSrcExpl.getValue();
return Explanation.match(valSrcScore + subQueryExpl.getValue(), "custom score: sum of:", subs);
}
};
}
@ -140,17 +142,12 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
if (valSrcExpls.length == 0) {
return subQueryExpl;
}
Explanation exp = new Explanation(valSrcExpls[0].getValue() + subQueryExpl.getValue(), "sum of:");
exp.addDetail(subQueryExpl);
exp.addDetail(valSrcExpls[0]);
if (valSrcExpls.length == 1) {
exp.setDescription("CustomMulAdd, sum of:");
return exp;
return Explanation.match(valSrcExpls[0].getValue() + subQueryExpl.getValue(), "CustomMulAdd, sum of:", subQueryExpl, valSrcExpls[0]);
} else {
Explanation exp = Explanation.match(valSrcExpls[0].getValue() + subQueryExpl.getValue(), "sum of:", subQueryExpl, valSrcExpls[0]);
return Explanation.match(valSrcExpls[1].getValue() * exp.getValue(), "custom score: product of:", valSrcExpls[1], exp);
}
Explanation exp2 = new Explanation(valSrcExpls[1].getValue() * exp.getValue(), "custom score: product of:");
exp2.addDetail(valSrcExpls[1]);
exp2.addDetail(exp);
return exp2;
}
};
}

View File

@ -16,10 +16,13 @@
*/
package org.apache.lucene.spatial.bbox;
import com.spatial4j.core.shape.Rectangle;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.Explanation;
import com.spatial4j.core.shape.Rectangle;
/**
* The algorithm is implemented as envelope on envelope (rect on rect) overlays rather than
* complex polygon on complex polygon overlays.
@ -126,7 +129,7 @@ public class BBoxOverlapRatioValueSource extends BBoxSimilarityValueSource {
}
@Override
protected double score(Rectangle target, Explanation exp) {
protected double score(Rectangle target, AtomicReference<Explanation> exp) {
// calculate "height": the intersection height between two boxes.
double top = Math.min(queryExtent.getMaxY(), target.getMaxY());
double bottom = Math.max(queryExtent.getMinY(), target.getMinY());
@ -211,25 +214,19 @@ public class BBoxOverlapRatioValueSource extends BBoxSimilarityValueSource {
double score = queryFactor + targetFactor;
if (exp!=null) {
exp.setValue((float)score);
exp.setDescription(this.getClass().getSimpleName()+": queryFactor + targetFactor");
Explanation e;//tmp
String minSideDesc = minSideLength > 0.0 ? " (minSide="+minSideLength+")" : "";
exp.addDetail( e = new Explanation((float)intersectionArea, "IntersectionArea" + minSideDesc));
e.addDetail(new Explanation((float)width, "width"));
e.addDetail(new Explanation((float)height, "height"));
e.addDetail(new Explanation((float)queryTargetProportion, "queryTargetProportion"));
exp.addDetail( e = new Explanation((float)queryFactor, "queryFactor"));
e.addDetail(new Explanation((float)queryRatio, "ratio"));
e.addDetail(new Explanation((float)queryArea, "area of " + queryExtent + minSideDesc));
exp.addDetail( e = new Explanation((float)targetFactor, "targetFactor"));
e.addDetail(new Explanation((float)targetRatio, "ratio"));
e.addDetail(new Explanation((float)targetArea, "area of " + target + minSideDesc));
exp.set(Explanation.match((float) score,
this.getClass().getSimpleName()+": queryFactor + targetFactor",
Explanation.match((float)intersectionArea, "IntersectionArea" + minSideDesc,
Explanation.match((float)width, "width"),
Explanation.match((float)height, "height"),
Explanation.match((float)queryTargetProportion, "queryTargetProportion")),
Explanation.match((float)queryFactor, "queryFactor",
Explanation.match((float)queryRatio, "ratio"),
Explanation.match((float)queryArea, "area of " + queryExtent + minSideDesc)),
Explanation.match((float)targetFactor, "targetFactor",
Explanation.match((float)targetRatio, "ratio"),
Explanation.match((float)targetArea, "area of " + target + minSideDesc))));
}
return score;

View File

@ -17,7 +17,10 @@ package org.apache.lucene.spatial.bbox;
* limitations under the License.
*/
import com.spatial4j.core.shape.Rectangle;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -25,8 +28,7 @@ import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import java.io.IOException;
import java.util.Map;
import com.spatial4j.core.shape.Rectangle;
/**
* A base class for calculating a spatial relevance rank per document from a provided
@ -81,10 +83,10 @@ public abstract class BBoxSimilarityValueSource extends ValueSource {
public Explanation explain(int doc) {
final Rectangle rect = (Rectangle) shapeValues.objectVal(doc);
if (rect == null)
return new Explanation(0, "no rect");
Explanation exp = new Explanation();
score(rect, exp);
return exp;
return Explanation.noMatch("no rect");
AtomicReference<Explanation> explanation = new AtomicReference<>();
score(rect, explanation);
return explanation.get();
}
};
}
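
Since an Explanation can no longer be created empty and filled in by the callee, the explain() above passes an AtomicReference holder into score() and reads the result back. A hedged sketch of how an implementation is expected to populate the holder (the ratio arithmetic is a placeholder, not the real scoring):

import java.util.concurrent.atomic.AtomicReference;

import org.apache.lucene.search.Explanation;

class HolderExample {
  // Compute a score and, if a holder was supplied, publish the matching explanation.
  static double score(double ratio, AtomicReference<Explanation> exp) {
    double score = ratio * 0.5; // placeholder arithmetic
    if (exp != null) {
      exp.set(Explanation.match((float) score, "ratio scaled by 0.5",
          Explanation.match((float) ratio, "ratio")));
    }
    return score;
  }
}
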
@ -95,7 +97,7 @@ public abstract class BBoxSimilarityValueSource extends ValueSource {
* @param exp Optional diagnostic holder.
* @return a score.
*/
protected abstract double score(Rectangle rect, Explanation exp);
protected abstract double score(Rectangle rect, AtomicReference<Explanation> exp);
@Override
public boolean equals(Object o) {

View File

@ -87,7 +87,7 @@ class BBoxValueSource extends ValueSource {
@Override
public Explanation explain(int doc) {
return new Explanation(Float.NaN, toString(doc));
return Explanation.match(Float.NaN, toString(doc));
}
@Override

View File

@ -266,7 +266,7 @@ public class SerializedDVStrategy extends SpatialStrategy {
@Override
public Explanation explain(int doc) {
return new Explanation(Float.NaN, toString(doc));
return Explanation.match(Float.NaN, toString(doc));
}
@Override

View File

@ -17,10 +17,12 @@ package org.apache.lucene.spatial.util;
* limitations under the License.
*/
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceCalculator;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Shape;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -28,8 +30,10 @@ import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import java.io.IOException;
import java.util.Map;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceCalculator;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Shape;
/**
* The distance from a provided Point to a Point retrieved from a ValueSource via
@ -84,8 +88,9 @@ public class DistanceToShapeValueSource extends ValueSource {
@Override
public Explanation explain(int doc) {
Explanation exp = super.explain(doc);
exp.addDetail(shapeValues.explain(doc));
return exp;
List<Explanation> details = new ArrayList<>(Arrays.asList(exp.getDetails()));
details.add(shapeValues.explain(doc));
return Explanation.match(exp.getValue(), exp.getDescription(), details);
}
};
}

View File

@ -18,10 +18,14 @@ package org.apache.lucene.spatial.util;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -83,8 +87,9 @@ public class ShapeAreaValueSource extends ValueSource {
@Override
public Explanation explain(int doc) {
Explanation exp = super.explain(doc);
exp.addDetail(shapeValues.explain(doc));
return exp;
List<Explanation> details = new ArrayList<>(Arrays.asList(exp.getDetails()));
details.add(shapeValues.explain(doc));
return Explanation.match(exp.getValue(), exp.getDescription(), details);
}
};
}

View File

@ -18,6 +18,7 @@ package org.apache.lucene.spatial.util;
*/
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
@ -27,6 +28,9 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.spatial.query.SpatialOperation;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
@ -79,8 +83,9 @@ public class ShapePredicateValueSource extends ValueSource {
@Override
public Explanation explain(int doc) {
Explanation exp = super.explain(doc);
exp.addDetail(shapeValues.explain(doc));
return exp;
List<Explanation> details = new ArrayList<>(Arrays.asList(exp.getDetails()));
details.add(shapeValues.explain(doc));
return Explanation.match(exp.getValue(), exp.getDescription(), details);
}
};
}

View File

@ -347,7 +347,7 @@ public class CheckHits {
if (expl.getDescription().endsWith("computed from:")) {
return; // something more complicated.
}
if (detail!=null) {
if (detail.length > 0) {
if (detail.length==1) {
// simple containment, unless it's a freq of: (which lets a query explain how the freq is calculated),
// just verify contained expl has same score
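
The null check above could be dropped because the simplified API's getDetails() returns an empty array rather than null when an explanation has no sub-details. A small sketch of the assumed contract (class name is illustrative only):

import org.apache.lucene.search.Explanation;

class DetailsContractExample {
  static void demo() {
    Explanation leaf = Explanation.match(1.0f, "leaf explanation with no details");
    // Never null under the simplified API; only possibly zero-length.
    assert leaf.getDetails() != null && leaf.getDetails().length == 0;
  }
}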

View File

@ -22,10 +22,6 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
@ -35,7 +31,6 @@ import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.valuesource.VectorValueSource;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@ -53,6 +48,11 @@ import org.apache.solr.search.QParser;
import org.apache.solr.search.SpatialOptions;
import org.apache.solr.util.SpatialUtils;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
/**
* Represents a Latitude/Longitude as a 2 dimensional point. Latitude is <b>always</b> specified first.
@ -495,13 +495,14 @@ class SpatialDistanceQuery extends ExtendedQueryBase implements PostFilter {
String description = SpatialDistanceQuery.this.toString();
Explanation result = new ComplexExplanation
(this.doc == doc, sc, description + " product of:");
// result.addDetail(new Explanation((float)dist, "hsin("+latVals.explain(doc)+","+lonVals.explain(doc)));
result.addDetail(new Explanation((float)dist, "hsin("+latVals.doubleVal(doc)+","+lonVals.doubleVal(doc)));
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(weight.queryNorm,"queryNorm"));
return result;
if (matched) {
return Explanation.match(sc, description + " product of:",
Explanation.match((float) dist, "hsin("+latVals.doubleVal(doc)+","+lonVals.doubleVal(doc)),
Explanation.match(getBoost(), "boost"),
Explanation.match(weight.queryNorm,"queryNorm"));
} else {
return Explanation.noMatch("No match");
}
}
}

View File

@ -31,7 +31,6 @@ import org.apache.lucene.index.MultiPostingsEnum;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
@ -568,22 +567,13 @@ class JoinQuery extends Query {
Scorer scorer = scorer(context, context.reader().getLiveDocs());
boolean exists = scorer.advance(doc) == doc;
ComplexExplanation result = new ComplexExplanation();
if (exists) {
result.setDescription(this.toString()
+ " , product of:");
result.setValue(queryWeight);
result.setMatch(Boolean.TRUE);
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(queryNorm,"queryNorm"));
return Explanation.match(queryWeight, this.toString() + " , product of:",
Explanation.match(getBoost(), "boost"),
Explanation.match(queryNorm,"queryNorm"));
} else {
result.setDescription(this.toString()
+ " doesn't match id " + doc);
result.setValue(0);
result.setMatch(Boolean.FALSE);
return Explanation.noMatch(this.toString() + " doesn't match id " + doc);
}
return result;
}
}

View File

@ -7,7 +7,6 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
@ -135,22 +134,13 @@ public class SolrConstantScoreQuery extends ConstantScoreQuery implements Extend
ConstantScorer cs = new ConstantScorer(context, this, queryWeight, context.reader().getLiveDocs());
boolean exists = cs.docIdSetIterator.advance(doc) == doc;
ComplexExplanation result = new ComplexExplanation();
if (exists) {
result.setDescription("ConstantScoreQuery(" + filter
+ "), product of:");
result.setValue(queryWeight);
result.setMatch(Boolean.TRUE);
result.addDetail(new Explanation(getBoost(), "boost"));
result.addDetail(new Explanation(queryNorm,"queryNorm"));
return Explanation.match(queryWeight, "ConstantScoreQuery(" + filter + "), product of:",
Explanation.match(getBoost(), "boost"),
Explanation.match(queryNorm,"queryNorm"));
} else {
result.setDescription("ConstantScoreQuery(" + filter
+ ") doesn't match id " + doc);
result.setValue(0);
result.setMatch(Boolean.FALSE);
return Explanation.noMatch("ConstantScoreQuery(" + filter + ") doesn't match id " + doc);
}
return result;
}
}

View File

@ -387,7 +387,7 @@ public class SolrPluginUtils {
Explanation[] details = e.getDetails();
// short circuit out
if (null == details || 0 == details.length) return out;
if (0 == details.length) return out;
List<NamedList<Object>> kids
= new ArrayList<>(details.length);