mirror of https://github.com/apache/lucene.git
LUCENE-7996: Queries are now required to produce positive scores.
This commit is contained in:
parent e1851c0ce7
commit a8a63464e7
@@ -23,6 +23,9 @@ API Changes

* LUCENE-8014: Similarity.computeSlopFactor() and
  Similarity.computePayloadFactor() have been removed (Alan Woodward)

* LUCENE-7996: Queries are now required to produce positive scores.
  (Adrien Grand)

Changes in Runtime Behavior

* LUCENE-7837: Indices that were created before the previous major version
@@ -6,3 +6,12 @@ SpanQuery and PhraseQuery now always calculate their slops as (1.0 / (1.0 +
distance)). Payload factor calculation is performed by PayloadDecoder in the
queries module
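
To make the formula concrete, here is a minimal sketch of the slop calculation described above (the method name is illustrative, not the actual PhraseQuery/SpanQuery internals):

```java
// Illustrative only: the slop factor described above as a standalone method.
static float slopFactor(int distance) {
  return 1.0f / (1.0f + distance); // distance 0 -> 1.0, distance 3 -> 0.25
}
```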
## Scorer must produce positive scores (LUCENE-7996) ##

Scorers are no longer allowed to produce negative scores. If you have custom
query implementations, you should make sure their score formulas can never
produce negative scores.

As a side-effect of this change, negative boosts are now rejected and
FunctionScoreQuery maps negative values to 0.
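
If a custom query wraps another scorer, one way to satisfy the new contract is to clamp at scoring time, similar to what this commit does in FunctionQuery and BoostedQuery. Below is a minimal sketch, assuming lucene-core on the classpath; the class name and clamping policy are illustrative, not part of Lucene's API:

```java
import java.io.IOException;

import org.apache.lucene.search.FilterScorer;
import org.apache.lucene.search.Scorer;

// Illustrative sketch: wraps an existing Scorer and maps negative or NaN
// scores to 0 so the wrapping query satisfies the positive-scores contract.
class NonNegativeScorer extends FilterScorer {

  NonNegativeScorer(Scorer in) {
    super(in);
  }

  @Override
  public float score() throws IOException {
    float score = in.score();
    // "score >= 0" is false for NaN, so NaN is mapped to 0 as well.
    return score >= 0 ? score : 0f;
  }
}
```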
@@ -37,6 +37,9 @@ public final class BoostQuery extends Query {
   * scores will be boosted by {@code boost}. */
  public BoostQuery(Query query, float boost) {
    this.query = Objects.requireNonNull(query);
    if (Float.isFinite(boost) == false || Float.compare(boost, 0f) < 0) {
      throw new IllegalArgumentException("boost must be a positive float, got " + boost);
    }
    this.boost = boost;
  }
@@ -63,9 +63,8 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
      public void collect(int doc) throws IOException {
        float score = scorer.score();

        // This collector cannot handle these scores:
        assert score != Float.NEGATIVE_INFINITY;
        assert !Float.isNaN(score);
        // This collector relies on the fact that scorers produce positive values:
        assert score >= 0; // NOTE: false for NaN

        totalHits++;
        if (score <= pqTop.score) {

@@ -114,9 +113,8 @@ public abstract class TopScoreDocCollector extends TopDocsCollector<ScoreDoc> {
      public void collect(int doc) throws IOException {
        float score = scorer.score();

        // This collector cannot handle these scores:
        assert score != Float.NEGATIVE_INFINITY;
        assert !Float.isNaN(score);
        // This collector relies on the fact that scorers produce positive values:
        assert score >= 0; // NOTE: false for NaN

        totalHits++;
@@ -33,48 +33,6 @@ import org.apache.lucene.util.LuceneTestCase;
/** JUnit adaptation of an older test case SearchTest. */
public class TestSearch extends LuceneTestCase {

  public void testNegativeQueryBoost() throws Exception {
    BoostQuery q = new BoostQuery(new TermQuery(new Term("foo", "bar")), -42f);
    assertEquals(-42f, q.getBoost(), 0f);

    Directory directory = newDirectory();
    try {
      Analyzer analyzer = new MockAnalyzer(random());
      IndexWriterConfig conf = newIndexWriterConfig(analyzer);

      IndexWriter writer = new IndexWriter(directory, conf);
      try {
        Document d = new Document();
        d.add(newTextField("foo", "bar", Field.Store.YES));
        writer.addDocument(d);
      } finally {
        writer.close();
      }

      IndexReader reader = DirectoryReader.open(directory);
      try {
        IndexSearcher searcher = newSearcher(reader);

        ScoreDoc[] hits = searcher.search(q, 1000).scoreDocs;
        assertEquals(1, hits.length);
        assertTrue("score is positive: " + hits[0].score,
                   hits[0].score <= 0);

        Explanation explain = searcher.explain(q, hits[0].doc);
        assertEquals("score doesn't match explanation",
                     hits[0].score, explain.getValue(), 0.001f);
        assertTrue("explain doesn't think doc is a match",
                   explain.isMatch());

      } finally {
        reader.close();
      }
    } finally {
      directory.close();
    }

  }

  /** This test performs a number of searches. It also compares output
   * of searches using multi-file index segments with single-file
   * index segments.
@@ -26,8 +26,22 @@ import org.apache.lucene.util.LuceneTestCase;

public class TestBoostQuery extends LuceneTestCase {

  public void testValidation() {
    IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
        () -> new BoostQuery(new MatchAllDocsQuery(), -3));
    assertEquals("boost must be a positive float, got -3.0", e.getMessage());

    e = expectThrows(IllegalArgumentException.class,
        () -> new BoostQuery(new MatchAllDocsQuery(), -0f));
    assertEquals("boost must be a positive float, got -0.0", e.getMessage());

    e = expectThrows(IllegalArgumentException.class,
        () -> new BoostQuery(new MatchAllDocsQuery(), Float.NaN));
    assertEquals("boost must be a positive float, got NaN", e.getMessage());
  }

  public void testEquals() {
    final float boost = random().nextFloat() * 3 - 1;
    final float boost = random().nextFloat() * 3;
    BoostQuery q1 = new BoostQuery(new MatchAllDocsQuery(), boost);
    BoostQuery q2 = new BoostQuery(new MatchAllDocsQuery(), boost);
    assertEquals(q1, q2);

@@ -35,7 +49,7 @@ public class TestBoostQuery extends LuceneTestCase {

    float boost2 = boost;
    while (boost == boost2) {
      boost2 = random().nextFloat() * 3 - 1;
      boost2 = random().nextFloat() * 3;
    }
    BoostQuery q3 = new BoostQuery(new MatchAllDocsQuery(), boost2);
    assertFalse(q1.equals(q3));
@@ -127,7 +127,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
    q.add(new BoostQuery(t, 1000), Occur.SHOULD);

    t = new ConstantScoreQuery(matchTheseItems(new int[] {0,2}));
    q.add(new BoostQuery(t, -20), Occur.SHOULD);
    q.add(new BoostQuery(t, 20), Occur.SHOULD);

    List<Query> disjuncts = new ArrayList<>();
    disjuncts.add(snear(st("w2"),
@@ -507,22 +507,6 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
    assertEquals(hits, 1);
    directory.close();
  }

  public void testNegativeScore() throws Exception {
    DisjunctionMaxQuery q = new DisjunctionMaxQuery(
        Arrays.asList(
            new BoostQuery(tq("hed", "albino"), -1f),
            new BoostQuery(tq("hed", "elephant"), -1f)
        ), 0.0f);

    ScoreDoc[] h = s.search(q, 1000).scoreDocs;

    assertEquals("all docs should match " + q.toString(), 4, h.length);

    for (int i = 0; i < h.length; i++) {
      assertTrue("score should be negative", h[i].score < 0);
    }
  }

  public void testRewriteBoolean() throws Exception {
    Query sub1 = tq("hed", "albino");
@@ -476,7 +476,7 @@ public class TestQueryRescorer extends LuceneTestCase {
          return num;
        } else {
          //System.out.println("score doc=" + docID + " num=" + -num);
          return -num;
          return 1f / (1 + num);
        }
      }
    };
@@ -56,6 +56,10 @@ public class BoostingQuery extends Query {
  public BoostingQuery(Query match, Query context, float boost) {
    this.match = match;
    this.context = context; // ignore context-only matches
    if (Float.isFinite(boost) == false || Float.compare(boost, 0f) < 0) {
      // otherwise scores could be negative
      throw new IllegalArgumentException("boost must be a non-negative float, got " + boost);
    }
    this.boost = boost;
  }
@@ -100,14 +100,27 @@ public final class BoostedQuery extends Query {
        return subQueryExpl;
      }
      FunctionValues vals = boostVal.getValues(fcontext, readerContext);
      float sc = subQueryExpl.getValue() * vals.floatVal(doc);
      return Explanation.match(sc, BoostedQuery.this.toString() + ", product of:", subQueryExpl, vals.explain(doc));
      float factor = vals.floatVal(doc);
      Explanation factorExpl = vals.explain(doc);
      if (factor < 0) {
        factor = 0;
        factorExpl = Explanation.match(0, "truncated score, max of:",
            Explanation.match(0f, "minimum score"), factorExpl);
      } else if (Float.isNaN(factor)) {
        factor = 0;
        factorExpl = Explanation.match(0, "score, computed as (score == NaN ? 0 : score) since NaN is an illegal score from:", factorExpl);
      }

      float sc = subQueryExpl.getValue() * factor;
      return Explanation.match(sc, BoostedQuery.this.toString() + ", product of:",
          subQueryExpl, factorExpl);
    }
  }

  private class CustomScorer extends FilterScorer {
    private final BoostedQuery.BoostedWeight weight;
    private final ValueSource vs;
    private final FunctionValues vals;
    private final LeafReaderContext readerContext;

@@ -116,33 +129,23 @@ public final class BoostedQuery extends Query {
      super(scorer);
      this.weight = w;
      this.readerContext = readerContext;
      this.vs = vs;
      this.vals = vs.getValues(weight.fcontext, readerContext);
    }

    @Override
    public float score() throws IOException {
      float score = in.score() * vals.floatVal(in.docID());

      // Current Lucene priority queues can't handle NaN and -Infinity, so
      // map to -Float.MAX_VALUE. This conditional handles both -infinity
      // and NaN since comparisons with NaN are always false.
      return score > Float.NEGATIVE_INFINITY ? score : -Float.MAX_VALUE;
      float factor = vals.floatVal(in.docID());
      if (factor >= 0 == false) { // covers NaN as well
        factor = 0;
      }
      return in.score() * factor;
    }

    @Override
    public Collection<ChildScorer> getChildren() {
      return Collections.singleton(new ChildScorer(in, "CUSTOM"));
    }

    public Explanation explain(int doc) throws IOException {
      Explanation subQueryExpl = weight.qWeight.explain(readerContext, doc);
      if (!subQueryExpl.isMatch()) {
        return subQueryExpl;
      }
      float sc = subQueryExpl.getValue() * vals.floatVal(doc);
      return Explanation.match(sc, BoostedQuery.this.toString() + ", product of:", subQueryExpl, vals.explain(doc));
    }

  }
@@ -115,18 +115,23 @@ public class FunctionQuery extends Query {

    @Override
    public float score() throws IOException {
      float score = boost * vals.floatVal(docID());

      // Current Lucene priority queues can't handle NaN and -Infinity, so
      // map to -Float.MAX_VALUE. This conditional handles both -infinity
      // and NaN since comparisons with NaN are always false.
      return score > Float.NEGATIVE_INFINITY ? score : -Float.MAX_VALUE;
      float val = vals.floatVal(docID());
      if (val >= 0 == false) { // this covers NaN as well since comparisons with NaN return false
        return 0;
      } else {
        return boost * val;
      }
    }

    public Explanation explain(int doc) throws IOException {
      float sc = boost * vals.floatVal(doc);
      Explanation expl = vals.explain(doc);
      if (expl.getValue() < 0) {
        expl = Explanation.match(0, "truncated score, max of:", Explanation.match(0f, "minimum score"), expl);
      } else if (Float.isNaN(expl.getValue())) {
        expl = Explanation.match(0, "score, computed as (score == NaN ? 0 : score) since NaN is an illegal score from:", expl);
      }

      return Explanation.match(sc, "FunctionQuery(" + func + "), product of:",
      return Explanation.match(boost * expl.getValue(), "FunctionQuery(" + func + "), product of:",
          vals.explain(doc),
          Explanation.match(weight.boost, "boost"));
    }
@@ -110,13 +110,44 @@ public final class FunctionScoreQuery extends Query {

    @Override
    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
      Scorer scorer = inner.scorer(context);
      if (scorer.iterator().advance(doc) != doc)
        return Explanation.noMatch("No match");
      Explanation scoreExplanation = inner.explain(context, doc);
      Explanation expl = valueSource.explain(context, doc, scoreExplanation);
      return Explanation.match(expl.getValue() * boost, "product of:",
          Explanation.match(boost, "boost"), expl);
      if (scoreExplanation.isMatch() == false) {
        return scoreExplanation;
      }

      Scorer scorer = inner.scorer(context);
      DoubleValues values = valueSource.getValues(context, DoubleValuesSource.fromScorer(scorer));
      int advanced = scorer.iterator().advance(doc);
      assert advanced == doc;

      double value;
      Explanation expl;
      if (values.advanceExact(doc)) {
        value = values.doubleValue();
        expl = valueSource.explain(context, doc, scoreExplanation);
        if (value < 0) {
          value = 0;
          expl = Explanation.match(0, "truncated score, max of:",
              Explanation.match(0f, "minimum score"), expl);
        } else if (Double.isNaN(value)) {
          value = 0;
          expl = Explanation.match(0, "score, computed as (score == NaN ? 0 : score) since NaN is an illegal score from:", expl);
        }
      } else {
        value = 0;
        expl = valueSource.explain(context, doc, scoreExplanation);
      }

      if (expl.isMatch() == false) {
        expl = Explanation.match(0f, "weight(" + getQuery().toString() + ") using default score of 0 because the function produced no value:", expl);
      } else if (boost != 1f) {
        expl = Explanation.match((float) (value * boost), "weight(" + getQuery().toString() + "), product of:",
            Explanation.match(boost, "boost"), expl);
      } else {
        expl = Explanation.match(expl.getValue(), "weight(" + getQuery().toString() + "), result of:", expl);
      }

      return expl;
    }

    @Override

@@ -128,10 +159,14 @@ public final class FunctionScoreQuery extends Query {
      return new FilterScorer(in) {
        @Override
        public float score() throws IOException {
          if (scores.advanceExact(docID()))
            return (float) (scores.doubleValue() * boost);
          else
            return 0;
          if (scores.advanceExact(docID())) {
            double factor = scores.doubleValue();
            if (factor >= 0) {
              return (float) (factor * boost);
            }
          }
          // default: missing value, negative value or NaN
          return 0;
        }
      };
    }
@@ -60,15 +60,15 @@ public class TestFunctionScoreExplanations extends BaseExplanationTestCase {
  public void testExplanationsIncludingScore() throws Exception {

    Query q = new TermQuery(new Term(FIELD, "w1"));
    FunctionScoreQuery csq = new FunctionScoreQuery(q, DoubleValuesSource.SCORES);
    FunctionScoreQuery fsq = new FunctionScoreQuery(q, DoubleValuesSource.SCORES);

    qtest(csq, new int[] { 0, 1, 2, 3 });
    qtest(fsq, new int[] { 0, 1, 2, 3 });

    Explanation e1 = searcher.explain(q, 0);
    Explanation e = searcher.explain(csq, 0);
    Explanation e = searcher.explain(fsq, 0);

    assertEquals(e.getValue(), e1.getValue(), 0.00001);
    assertEquals(e.getDetails()[1], e1);
    assertEquals(e.getDetails()[0], e1);

  }

@@ -78,7 +78,7 @@ public class TestFunctionScoreExplanations extends BaseExplanationTestCase {
    searcher.setSimilarity(new BM25Similarity());

    Explanation expl = searcher.explain(query, 0);
    Explanation subExpl = expl.getDetails()[1];
    Explanation subExpl = expl.getDetails()[0];
    assertEquals("constant(5.0)", subExpl.getDescription());
    assertEquals(0, subExpl.getDetails().length);
@@ -21,8 +21,11 @@ import java.io.IOException;
import java.util.function.DoubleUnaryOperator;
import java.util.function.ToDoubleBiFunction;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;

@@ -30,11 +33,14 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DoubleValues;
import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.junit.AfterClass;
import org.junit.BeforeClass;

@@ -216,4 +222,39 @@ public class TestFunctionScoreQuery extends FunctionTestSetup {
    };
  }

  public void testTruncateNegativeScores() throws IOException {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", -2));
    w.addDocument(doc);
    IndexReader reader = DirectoryReader.open(w);
    w.close();
    IndexSearcher searcher = newSearcher(reader);
    Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), DoubleValuesSource.fromLongField("foo"));
    QueryUtils.check(random(), q, searcher);
    Explanation expl = searcher.explain(q, 0);
    assertEquals(0, expl.getValue(), 0f);
    assertTrue(expl.toString(), expl.getDetails()[0].getDescription().contains("truncated score"));
    reader.close();
    dir.close();
  }

  public void testNaN() throws IOException {
    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig());
    Document doc = new Document();
    doc.add(new NumericDocValuesField("foo", Double.doubleToLongBits(Double.NaN)));
    w.addDocument(doc);
    IndexReader reader = DirectoryReader.open(w);
    w.close();
    IndexSearcher searcher = newSearcher(reader);
    Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), DoubleValuesSource.fromDoubleField("foo"));
    QueryUtils.check(random(), q, searcher);
    Explanation expl = searcher.explain(q, 0);
    assertEquals(0, expl.getValue(), 0f);
    assertTrue(expl.toString(), expl.getDetails()[0].getDescription().contains("NaN is an illegal score"));
    reader.close();
    dir.close();
  }
}
@@ -107,8 +107,8 @@ public class TestValueSources extends LuceneTestCase {

  static final List<String[]> documents = Arrays.asList(new String[][] {
      /* id, double, float, int, long, string, text, double MV (x3), int MV (x3)*/
      new String[] { "0", "3.63", "5.2", "35", "4343", "test", "this is a test test test", "2.13", "3.69", "-0.11", "1", "7", "5"},
      new String[] { "1", "5.65", "9.3", "54", "1954", "bar", "second test", "12.79", "123.456", "0.01", "12", "900", "-1" },
      new String[] { "0", "3.63", "5.2", "35", "4343", "test", "this is a test test test", "2.13", "3.69", "0.11", "1", "7", "5"},
      new String[] { "1", "5.65", "9.3", "54", "1954", "bar", "second test", "12.79", "123.456", "0.01", "12", "900", "3" },
  });

  @BeforeClass

@@ -203,7 +203,7 @@ public class TestValueSources extends LuceneTestCase {
    assertAllExist(vs);

    vs = new MultiValuedDoubleFieldSource("doubleMv", Type.MIN);
    assertHits(new FunctionQuery(vs), new float[] { -0.11f, 0.01f });
    assertHits(new FunctionQuery(vs), new float[] { 0.11f, 0.01f });
    assertAllExist(vs);
  }

@@ -220,7 +220,7 @@ public class TestValueSources extends LuceneTestCase {
    assertAllExist(vs);

    vs = new MultiValuedFloatFieldSource("floatMv", Type.MIN);
    assertHits(new FunctionQuery(vs), new float[] { -0.11f, 0.01f });
    assertHits(new FunctionQuery(vs), new float[] { 0.11f, 0.01f });
    assertAllExist(vs);
  }

@@ -277,7 +277,7 @@ public class TestValueSources extends LuceneTestCase {
    assertAllExist(vs);

    vs = new MultiValuedIntFieldSource("intMv", Type.MIN);
    assertHits(new FunctionQuery(vs), new float[] { 1f, -1f });
    assertHits(new FunctionQuery(vs), new float[] { 1f, 3f });
    assertAllExist(vs);
  }

@@ -309,7 +309,7 @@ public class TestValueSources extends LuceneTestCase {
    assertAllExist(vs);

    vs = new MultiValuedLongFieldSource("longMv", Type.MIN);
    assertHits(new FunctionQuery(vs), new float[] { 1f, -1f });
    assertHits(new FunctionQuery(vs), new float[] { 1f, 3f });
    assertAllExist(vs);
  }
@@ -40,6 +40,7 @@ public final class AssertingQuery extends Query {

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    assert boost >= 0;
    return new AssertingWeight(new Random(random.nextLong()), in.createWeight(searcher, needsScores, boost), needsScores);
  }
@@ -69,6 +69,7 @@ public class AssertingScorer extends Scorer {
    assert iterating();
    final float score = in.score();
    assert !Float.isNaN(score) : "NaN score for in=" + in;
    assert Float.compare(score, 0f) >= 0 : score;
    return score;
  }
@@ -336,7 +336,7 @@ public class CheckHits {
      Assert.assertTrue("Child doc explanations are missing", detail.length > 0);
    }
    if (detail.length > 0) {
      if (detail.length==1) {
      if (detail.length==1 && COMPUTED_FROM_PATTERN.matcher(descr).matches() == false) {
        // simple containment, unless it's a freq of: (which lets a query explain how the freq is calculated),
        // just verify contained expl has same score
        if (expl.getDescription().endsWith("with freq of:") == false
@@ -49,6 +49,7 @@ public class AssertingSimilarity extends Similarity {

  @Override
  public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
    assert boost >= 0;
    assert collectionStats != null;
    assert termStats.length > 0;
    for (TermStatistics term : termStats) {

@@ -91,7 +92,7 @@ public class AssertingSimilarity extends Similarity {
      float score = delegateScorer.score(doc, freq);
      assert Float.isFinite(score);
      // TODO: some tests have negative boosts today
      assert score >= 0 || assertingWeight.boost < 0;
      assert score >= 0;
      return score;
    }
@@ -222,12 +222,6 @@ public class TestSolrQueryParser extends SolrTestCaseJ4 {
    assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);
    assertEquals(3.0, ((BoostQuery) q).getBoost(), 0.0f);

    qParser = QParser.getParser("(text:x text:y)^=-3", req);
    q = qParser.getQuery();
    assertTrue(q instanceof BoostQuery);
    assertTrue(((BoostQuery) q).getQuery() instanceof ConstantScoreQuery);
    assertEquals(-3.0, ((BoostQuery) q).getBoost(), 0.0f);

    req.close();
  }
@@ -150,7 +150,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {

    // test constant score
    singleTest(field,"1.414213", 10, 1.414213f);
    singleTest(field,"-1.414213", 10, -1.414213f);
    singleTest(field,"-1.414213", 10, 0f);

    singleTest(field,"sum(\0,1)", 10, 11);
    singleTest(field,"sum(\0,\0)", 10, 20);

@@ -166,20 +166,20 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    singleTest(field,"abs(\0)",10,10, -4,4);
    singleTest(field,"pow(\0,\0)",0,1, 5,3125);
    singleTest(field,"pow(\0,0.5)",100,10, 25,5, 0,0);
    singleTest(field,"div(1,\0)",-4,-.25f, 10,.1f, 100,.01f);
    singleTest(field,"div(1,\0)",-4,0f, 10,.1f, 100,.01f);
    singleTest(field,"div(1,1)",-4,1, 10,1);

    singleTest(field,"sqrt(abs(\0))",-4,2);
    singleTest(field,"sqrt(sum(29,\0))",-4,5);

    singleTest(field,"map(\0,0,0,500)",10,10, -4,-4, 0,500);
    singleTest(field,"map(\0,0,0,500)",10,10, -4,0, 0,500);
    singleTest(field,"map(\0,-4,5,500)",100,100, -4,500, 0,500, 5,500, 10,10, 25,25);
    singleTest(field,"map(\0,0,0,sum(\0,500))",10,10, -4,-4, 0,500);
    singleTest(field,"map(\0,0,0,sum(\0,500),sum(\0,1))",10,11, -4,-3, 0,500);
    singleTest(field,"map(\0,-4,5,sum(\0,1))",100,100, -4,-3, 0,1, 5,6, 10,10, 25,25);
    singleTest(field,"map(\0,0,0,sum(\0,500))",10,10, -4,0, 0,500);
    singleTest(field,"map(\0,0,0,sum(\0,500),sum(\0,1))",10,11, -4,0, 0,500);
    singleTest(field,"map(\0,-4,5,sum(\0,1))",100,100, -4,0, 0,1, 5,6, 10,10, 25,25);

    singleTest(field,"scale(\0,-1,1)",-4,-1, 100,1, 0,-0.9230769f);
    singleTest(field,"scale(\0,-10,1000)",-4,-10, 100,1000, 0,28.846153f);
    singleTest(field,"scale(\0,-1,1)",-4,0, 100,1, 0,0);
    singleTest(field,"scale(\0,-10,1000)",-4,0, 100,1000, 0,28.846153f);

    // test that infinity doesn't mess up scale function
    singleTest(field,"scale(log(\0),-1000,1000)",100,1000);
@@ -222,7 +222,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    // Unsorted field, largest first
    makeExternalFile(field, "54321=543210\n0=-999\n25=250");
    // test identity (straight field value)
    singleTest(field, "\0", 54321, 543210, 0,-999, 25,250, 100, 1);
    singleTest(field, "\0", 54321, 543210, 0,0, 25,250, 100, 1);
    Object orig = FileFloatSource.onlyForTesting;
    singleTest(field, "log(\0)");
    // make sure the values were cached

@@ -273,7 +273,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    float[] answers = new float[ids.length*2];
    for (int j=0; j<len; j++) {
      answers[j*2] = ids[j];
      answers[j*2+1] = vals[j];
      answers[j*2+1] = Math.max(0, vals[j]);
    }
    for (int j=len; j<ids.length; j++) {
      answers[j*2] = ids[j];

@@ -296,7 +296,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    assertU(adoc("id", "993", keyField, "CCC=CCC"));
    assertU(commit());
    makeExternalFile(extField, "AAA=AAA=543210\nBBB=-8\nCCC=CCC=250");
    singleTest(extField,"\0",991,543210,992,-8,993,250);
    singleTest(extField,"\0",991,543210,992,0,993,250);
  }

  @Test

@@ -310,7 +310,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    assertU(adoc("id", "993", keyField, "93"));
    assertU(commit());
    makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67");
    singleTest(extField,"\0",991,543210,992,-8,993,250);
    singleTest(extField,"\0",991,543210,992,0,993,250);
  }

  @Test
@@ -366,7 +366,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {

    // test that we can subtract dates to millisecond precision
    assertQ(req("fl","*,score","q", "{!func}ms(a_tdt,b_tdt)", "fq","id:1"), "//float[@name='score']='-1.0'");
    assertQ(req("fl","*,score","q", "{!func}ms(a_tdt,b_tdt)", "fq","id:1"), "//float[@name='score']='0.0'");
    assertQ(req("fl","*,score","q", "{!func}ms(b_tdt,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'");
    assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,2009-08-31T12:10:10.124Z)", "fq","id:1"), "//float[@name='score']='1.0'");
    assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.124Z,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'");

@@ -756,7 +756,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
    // Unsorted field, largest first
    makeExternalFile(field, "54321=543210\n0=-999\n25=250");
    // test identity (straight field value)
    singleTest(fieldAsFunc, "\0", 54321, 543210, 0,-999, 25,250, 100, 1);
    singleTest(fieldAsFunc, "\0", 54321, 543210, 0,0, 25,250, 100, 1);
    Object orig = FileFloatSource.onlyForTesting;
    singleTest(fieldAsFunc, "log(\0)");
    // make sure the values were cached

@@ -790,7 +790,7 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {

    // test identity (straight field value)
    singleTest(fieldAsFunc, "\0",
        100,100, -4,-4, 0,0, 10,10, 25,25, 5,5, 77,77, 1,1);
        100,100, -4,0, 0,0, 10,10, 25,25, 5,5, 77,77, 1,1);
    singleTest(fieldAsFunc, "sqrt(\0)",
        100,10, 25,5, 0,0, 1,1);
    singleTest(fieldAsFunc, "log(\0)", 1,0);