mirror of https://github.com/apache/lucene.git
LUCENE-7368: Remove queryNorm.
parent 7b4af27aa8
commit 5def78ba10
@@ -13,6 +13,8 @@ API Changes
 * LUCENE-7369: Similarity.coord and BooleanQuery.disableCoord are removed.
   (Adrien Grand)
 
+* LUCENE-7368: Removed query normalization. (Adrien Grand)
+
 Bug Fixes
 
 Improvements
@@ -29,3 +29,10 @@ undesirable. The new BM25Similarity does not suffer from this problem since it
 has better saturation for the contribution of the term frequency so the coord
 factors have been removed from scores. Things now work as if coords were always
 disabled when constructing boolean queries.
+
+## Weight.getValueForNormalization() and Weight.normalize() removed (LUCENE-7368)
+
+Query normalization's goal was to make scores comparable across queries, which
+was only implemented by the ClassicSimilarity. Since ClassicSimilarity is not
+the default similarity anymore, this functionality has been removed. Boosts are
+now propagated through Query#createWeight.
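For custom queries, the migration is mechanical: accept the boost in createWeight and fold it into the returned Weight up front, instead of waiting for a normalize() call that no longer happens. A minimal sketch of the new shape; the FixedScoreQuery class and its match-all behavior are invented for illustration and are not part of this commit:

```java
import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;

// Illustrative only: a query that matches all documents with a fixed score.
public final class FixedScoreQuery extends Query {

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    // Boosts from wrapping BoostQuery instances arrive here pre-multiplied;
    // there is no later normalize(norm, boost) pass to wait for.
    return new ConstantScoreWeight(this, boost) {
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        return new ConstantScoreScorer(this, score(),
            DocIdSetIterator.all(context.reader().maxDoc()));
      }
    };
  }

  @Override
  public String toString(String field) {
    return "FixedScoreQuery";
  }

  @Override
  public boolean equals(Object other) {
    return other instanceof FixedScoreQuery;
  }

  @Override
  public int hashCode() {
    return getClass().hashCode();
  }
}
```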
@@ -196,12 +196,12 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     BooleanQuery query = this;
     if (needsScores == false) {
       query = rewriteNoScoring();
     }
-    return new BooleanWeight(query, searcher, needsScores);
+    return new BooleanWeight(query, searcher, needsScores, boost);
   }
 
   @Override
@@ -42,14 +42,14 @@ final class BooleanWeight extends Weight {
   final ArrayList<Weight> weights;
   final boolean needsScores;
 
-  BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores) throws IOException {
+  BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     super(query);
     this.query = query;
     this.needsScores = needsScores;
     this.similarity = searcher.getSimilarity(needsScores);
     weights = new ArrayList<>();
     for (BooleanClause c : query) {
-      Weight w = searcher.createWeight(c.getQuery(), needsScores && c.isScoring());
+      Weight w = searcher.createWeight(c.getQuery(), needsScores && c.isScoring(), boost);
       weights.add(w);
     }
   }

@@ -65,31 +65,6 @@
     }
   }
 
-  @Override
-  public float getValueForNormalization() throws IOException {
-    float sum = 0.0f;
-    int i = 0;
-    for (BooleanClause clause : query) {
-      // call sumOfSquaredWeights for all clauses in case of side effects
-      float s = weights.get(i).getValueForNormalization(); // sum sub weights
-      if (clause.isScoring()) {
-        // only add to sum for scoring clauses
-        sum += s;
-      }
-      i += 1;
-    }
-
-    return sum;
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    for (Weight w : weights) {
-      // normalize all clauses (even if non-scoring, in case of side effects)
-      w.normalize(norm, boost);
-    }
-  }
-
   @Override
   public Explanation explain(LeafReaderContext context, int doc) throws IOException {
     final int minShouldMatch = query.getMinimumNumberShouldMatch();
@@ -19,11 +19,8 @@ package org.apache.lucene.search;
 
 import java.io.IOException;
 import java.util.Objects;
-import java.util.Set;
 
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
 
 /**
  * A {@link Query} wrapper that allows to give a boost to the wrapped query.

@@ -113,45 +110,8 @@ public final class BoostQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight weight = query.createWeight(searcher, needsScores);
-    if (needsScores == false) {
-      return weight;
-    }
-    // Apply the query boost, this may impact the return value of getValueForNormalization()
-    weight.normalize(1f, boost);
-    return new Weight(this) {
-
-      @Override
-      public void extractTerms(Set<Term> terms) {
-        weight.extractTerms(terms);
-      }
-
-      @Override
-      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-        return weight.explain(context, doc);
-      }
-
-      @Override
-      public float getValueForNormalization() throws IOException {
-        return weight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        weight.normalize(norm, BoostQuery.this.boost * boost);
-      }
-
-      @Override
-      public Scorer scorer(LeafReaderContext context) throws IOException {
-        return weight.scorer(context);
-      }
-
-      @Override
-      public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
-        return weight.bulkScorer(context);
-      }
-    };
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return query.createWeight(searcher, needsScores, BoostQuery.this.boost * boost);
   }
 
 }
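With the anonymous delegating Weight gone, nested boosts simply multiply on the way down the query tree. A hedged usage sketch (field and term values invented, and an already-open IndexReader is assumed):

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;

final class BoostPropagationExample {
  static Weight boostedWeight(IndexReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    Query q = new BoostQuery(
        new BoostQuery(new TermQuery(new Term("body", "lucene")), 3f), 2f);
    // Each BoostQuery multiplies its boost into the value it passes down, so
    // TermQuery.createWeight receives 1f * 2f * 3f = 6f directly; no
    // Weight.normalize() pass happens afterwards.
    return searcher.createWeight(searcher.rewrite(q), true, 1f);
  }
}
```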
@@ -110,10 +110,10 @@ public final class ConstantScoreQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight innerWeight = searcher.createWeight(query, false);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight innerWeight = searcher.createWeight(query, false, 1f);
     if (needsScores) {
-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
 
         @Override
         public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
@@ -32,13 +32,11 @@ import org.apache.lucene.index.Term;
  */
 public abstract class ConstantScoreWeight extends Weight {
 
-  private float boost;
-  private float queryNorm;
-  private float queryWeight;
+  private final float score;
 
-  protected ConstantScoreWeight(Query query) {
+  protected ConstantScoreWeight(Query query, float score) {
     super(query);
-    normalize(1f, 1f);
+    this.score = score;
   }
 
   @Override

@@ -48,31 +46,9 @@ public abstract class ConstantScoreWeight extends Weight {
     // override if your constant-score query does wrap terms
   }
 
-  @Override
-  public final float getValueForNormalization() throws IOException {
-    return queryWeight * queryWeight;
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    this.boost = boost;
-    queryNorm = norm;
-    queryWeight = queryNorm * boost;
-  }
-
-  /** Return the normalization factor for this weight. */
-  protected final float queryNorm() {
-    return queryNorm;
-  }
-
-  /** Return the boost for this weight. */
-  protected final float boost() {
-    return boost;
-  }
-
   /** Return the score produced by this {@link Weight}. */
   protected final float score() {
-    return queryWeight;
+    return score;
   }
 
   @Override

@@ -92,8 +68,7 @@ public abstract class ConstantScoreWeight extends Weight {
 
     if (exists) {
       return Explanation.match(
-          queryWeight, getQuery().toString() + ", product of:",
-          Explanation.match(boost, "boost"), Explanation.match(queryNorm, "queryNorm"));
+          score, getQuery().toString() + (score == 1f ? "" : "^" + score));
     } else {
       return Explanation.noMatch(getQuery().toString() + " doesn't match id " + doc);
     }
@@ -100,10 +100,10 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
     private final boolean needsScores;
 
     /** Construct the Weight for this Query searched by searcher.  Recursively construct subquery weights. */
-    public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(DisjunctionMaxQuery.this);
       for (Query disjunctQuery : disjuncts) {
-        weights.add(searcher.createWeight(disjunctQuery, needsScores));
+        weights.add(searcher.createWeight(disjunctQuery, needsScores, boost));
       }
       this.needsScores = needsScores;
     }

@@ -115,27 +115,6 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
       }
     }
 
-    /** Compute the sum of squared weights of us applied to our subqueries.  Used for normalization. */
-    @Override
-    public float getValueForNormalization() throws IOException {
-      float max = 0.0f, sum = 0.0f;
-      for (Weight currentWeight : weights) {
-        float sub = currentWeight.getValueForNormalization();
-        sum += sub;
-        max = Math.max(max, sub);
-
-      }
-      return (((sum - max) * tieBreakerMultiplier * tieBreakerMultiplier) + max);
-    }
-
-    /** Apply the computed normalization factor to our subqueries */
-    @Override
-    public void normalize(float norm, float boost) {
-      for (Weight wt : weights) {
-        wt.normalize(norm, boost);
-      }
-    }
-
     /** Create the scorer used to score our associated DisjunctionMaxQuery */
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {

@@ -186,8 +165,8 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
 
   /** Create the Weight used to score us */
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new DisjunctionMaxWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new DisjunctionMaxWeight(searcher, needsScores, boost);
   }
 
   /** Optimize our representation and our subqueries representations
@@ -73,8 +73,8 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
     public final String getField() { return query.getField(); }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new RandomAccessWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new RandomAccessWeight(this, boost) {
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {
         final SortedSetDocValues fcsi = DocValues.getSortedSet(context.reader(), query.field);
@@ -59,8 +59,8 @@ public final class FieldValueQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new RandomAccessWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new RandomAccessWeight(this, boost) {
 
       @Override
       protected Bits getMatchingDocs(LeafReaderContext context) throws IOException {
@@ -87,18 +87,8 @@ public class IndexSearcher {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return new SimWeight() {
-
-        @Override
-        public float getValueForNormalization() {
-          return 1f;
-        }
-
-        @Override
-        public void normalize(float queryNorm, float boost) {}
-
-      };
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return new SimWeight() {};
     }
 
     @Override

@@ -732,14 +722,7 @@ public class IndexSearcher {
    */
   public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
     query = rewrite(query);
-    Weight weight = createWeight(query, needsScores);
-    float v = weight.getValueForNormalization();
-    float norm = getSimilarity(needsScores).queryNorm(v);
-    if (Float.isInfinite(norm) || Float.isNaN(norm)) {
-      norm = 1.0f;
-    }
-    weight.normalize(norm, 1.0f);
-    return weight;
+    return createWeight(query, needsScores, 1f);
   }
 
   /**

@@ -747,9 +730,9 @@
    * if possible and configured.
    * @lucene.experimental
    */
-  public Weight createWeight(Query query, boolean needsScores) throws IOException {
+  public Weight createWeight(Query query, boolean needsScores, float boost) throws IOException {
     final QueryCache queryCache = this.queryCache;
-    Weight weight = query.createWeight(this, needsScores);
+    Weight weight = query.createWeight(this, needsScores, boost);
     if (needsScores == false && queryCache != null) {
       weight = queryCache.doCache(weight, queryCachingPolicy);
     }
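After this hunk, createNormalizedWeight is a thin wrapper: rewrite the query, then call createWeight with a boost of 1. A hedged sketch of the equivalence from a caller's perspective (assumes an open IndexReader; the class name is invented):

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;

final class CreateWeightExample {
  // The two calls below now build equivalent weights: there is no
  // getValueForNormalization()/queryNorm()/normalize() round-trip in between.
  static void demo(IndexReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    Query q = new TermQuery(new Term("body", "lucene"));
    Weight viaNormalized = searcher.createNormalizedWeight(q, true);
    Weight viaCreate = searcher.createWeight(searcher.rewrite(q), true, 1f);
  }
}
```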
@@ -688,7 +688,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
     private final AtomicBoolean used;
 
     CachingWrapperWeight(Weight in, QueryCachingPolicy policy) {
-      super(in.getQuery());
+      super(in.getQuery(), 1f);
       this.in = in;
       this.policy = policy;
       used = new AtomicBoolean(false);
@@ -29,8 +29,8 @@ import org.apache.lucene.util.Bits;
 public final class MatchAllDocsQuery extends Query {
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) {
+    return new ConstantScoreWeight(this, boost) {
       @Override
       public String toString() {
         return "weight(" + MatchAllDocsQuery.this + ")";
@@ -42,7 +42,7 @@ public class MatchNoDocsQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(this) {
       @Override
       public void extractTerms(Set<Term> terms) {

@@ -58,29 +58,6 @@ public class MatchNoDocsQuery extends Query {
         return null;
       }
 
-      @Override
-      public final float getValueForNormalization() throws IOException {
-        return 0;
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-      }
-
-      /** Return the normalization factor for this weight. */
-      protected final float queryNorm() {
-        return 0;
-      }
-
-      /** Return the boost for this weight. */
-      protected final float boost() {
-        return 0;
-      }
-
-      /** Return the score produced by this {@link Weight}. */
-      protected final float score() {
-        return 0;
-      }
-
     };
   }
 
@@ -187,7 +187,7 @@ public class MultiPhraseQuery extends Query {
     private final Map<Term,TermContext> termContexts = new HashMap<>();
     private final boolean needsScores;
 
-    public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores)
+    public MultiPhraseWeight(IndexSearcher searcher, boolean needsScores, float boost)
       throws IOException {
       super(MultiPhraseQuery.this);
       this.needsScores = needsScores;

@@ -207,6 +207,7 @@ public class MultiPhraseQuery extends Query {
         }
       }
       stats = similarity.computeWeight(
+          boost,
           searcher.collectionStatistics(field),
           allTermStats.toArray(new TermStatistics[allTermStats.size()]));
     }

@@ -218,16 +219,6 @@ public class MultiPhraseQuery extends Query {
       }
     }
 
-    @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert termArrays.length != 0;

@@ -331,8 +322,8 @@ public class MultiPhraseQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new MultiPhraseWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new MultiPhraseWeight(searcher, needsScores, boost);
   }
 
   /** Prints a user-readable version of this query. */
@@ -108,8 +108,8 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
     public final String getField() { return query.getField(); }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
 
         /** Try to collect terms from the given terms enum and return true iff all
          *  terms could be collected. If {@code false} is returned, the enum is

@@ -153,8 +153,7 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
           bq.add(new TermQuery(new Term(query.field, t.term), termContext), Occur.SHOULD);
         }
         Query q = new ConstantScoreQuery(bq.build());
-        final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
-        weight.normalize(1f, score());
+        final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores, score());
        return new WeightOrDocIdSet(weight);
       }
 
@@ -356,7 +356,7 @@ public class PhraseQuery extends Query {
     private final boolean needsScores;
     private transient TermContext states[];
 
-    public PhraseWeight(IndexSearcher searcher, boolean needsScores)
+    public PhraseWeight(IndexSearcher searcher, boolean needsScores, float boost)
       throws IOException {
       super(PhraseQuery.this);
       final int[] positions = PhraseQuery.this.getPositions();

@@ -375,7 +375,7 @@ public class PhraseQuery extends Query {
         states[i] = TermContext.build(context, term);
         termStats[i] = searcher.termStatistics(term, states[i]);
       }
-      stats = similarity.computeWeight(searcher.collectionStatistics(field), termStats);
+      stats = similarity.computeWeight(boost, searcher.collectionStatistics(field), termStats);
     }
 
     @Override

@@ -386,16 +386,6 @@ public class PhraseQuery extends Query {
     @Override
     public String toString() { return "weight(" + PhraseQuery.this + ")"; }
 
-    @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert terms.length > 0;

@@ -507,8 +497,8 @@ public class PhraseQuery extends Query {
 
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new PhraseWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new PhraseWeight(searcher, needsScores, boost);
   }
 
   /** Prints a user-readable version of this query. */
@@ -106,12 +106,12 @@ public abstract class PointInSetQuery extends Query {
   }
 
   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       @Override
       public Scorer scorer(LeafReaderContext context) throws IOException {
@@ -98,12 +98,12 @@ public abstract class PointRangeQuery extends Query {
   }
 
   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
     // We don't use RandomAccessWeight here: it's no good to approximate with "match all docs".
     // This is an inverted structure and should be used in the first pass:
 
-    return new ConstantScoreWeight(this) {
+    return new ConstantScoreWeight(this, boost) {
 
       private DocIdSet buildMatchingDocIdSet(LeafReader reader, PointValues values) throws IOException {
         DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
@@ -61,8 +61,9 @@ public abstract class Query {
    *
    * @param needsScores   True if document scores ({@link Scorer#score}) or match
    *                      frequencies ({@link Scorer#freq}) are needed.
+   * @param boost         The boost that is propagated by the parent queries.
    */
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     throw new UnsupportedOperationException("Query " + this + " does not implement createWeight");
   }
 
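A consequence of the new parameter: queries that merely wrap another query must thread the boost through rather than swallow it. A hedged sketch of a pass-through wrapper (the WrappedQuery name is invented for illustration; a real wrapper would usually also override rewrite):

```java
import java.io.IOException;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;

// Illustrative pass-through wrapper: createWeight must forward the boost,
// otherwise any BoostQuery above it would silently be dropped.
public final class WrappedQuery extends Query {
  private final Query in;

  public WrappedQuery(Query in) {
    this.in = in;
  }

  @Override
  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
    return in.createWeight(searcher, needsScores, boost);
  }

  @Override
  public String toString(String field) {
    return "wrapped(" + in.toString(field) + ")";
  }

  @Override
  public boolean equals(Object other) {
    return other instanceof WrappedQuery && in.equals(((WrappedQuery) other).in);
  }

  @Override
  public int hashCode() {
    return 31 * getClass().hashCode() + in.hashCode();
  }
}
```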
@@ -34,8 +34,8 @@ import org.apache.lucene.util.Bits.MatchNoBits;
 public abstract class RandomAccessWeight extends ConstantScoreWeight {
 
   /** Sole constructor. */
-  protected RandomAccessWeight(Query query) {
-    super(query);
+  protected RandomAccessWeight(Query query, float boost) {
+    super(query, boost);
   }
 
   /**
@@ -110,16 +110,16 @@ public final class SynonymQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     if (needsScores) {
-      return new SynonymWeight(this, searcher);
+      return new SynonymWeight(this, searcher, boost);
     } else {
       // if scores are not needed, let BooleanWeight deal with optimizing that case.
       BooleanQuery.Builder bq = new BooleanQuery.Builder();
       for (Term term : terms) {
         bq.add(new TermQuery(term), BooleanClause.Occur.SHOULD);
       }
-      return searcher.rewrite(bq.build()).createWeight(searcher, needsScores);
+      return searcher.rewrite(bq.build()).createWeight(searcher, needsScores, boost);
     }
   }
 

@@ -128,7 +128,7 @@ public final class SynonymQuery extends Query {
     private final Similarity similarity;
     private final Similarity.SimWeight simWeight;
 
-    SynonymWeight(Query query, IndexSearcher searcher) throws IOException {
+    SynonymWeight(Query query, IndexSearcher searcher, float boost) throws IOException {
       super(query);
       CollectionStatistics collectionStats = searcher.collectionStatistics(terms[0].field());
       long docFreq = 0;

@@ -146,7 +146,7 @@ public final class SynonymQuery extends Query {
       }
       TermStatistics pseudoStats = new TermStatistics(null, docFreq, totalTermFreq);
       this.similarity = searcher.getSimilarity(true);
-      this.simWeight = similarity.computeWeight(collectionStats, pseudoStats);
+      this.simWeight = similarity.computeWeight(boost, collectionStats, pseudoStats);
     }
 
     @Override

@@ -183,16 +183,6 @@ public final class SynonymQuery extends Query {
       return Explanation.noMatch("no matching term");
     }
 
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return simWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      simWeight.normalize(norm, boost);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       Similarity.SimScorer simScorer = similarity.simScorer(simWeight, context);
@@ -48,8 +48,8 @@ public class TermQuery extends Query {
     private final TermContext termStates;
     private final boolean needsScores;
 
-    public TermWeight(IndexSearcher searcher, boolean needsScores, TermContext termStates)
-        throws IOException {
+    public TermWeight(IndexSearcher searcher, boolean needsScores,
+        float boost, TermContext termStates) throws IOException {
       super(TermQuery.this);
       this.needsScores = needsScores;
       assert termStates != null : "TermContext must not be null";

@@ -70,7 +70,7 @@ public class TermQuery extends Query {
         termStats = new TermStatistics(term.bytes(), docFreq, totalTermFreq);
       }
 
-      this.stats = similarity.computeWeight(collectionStats, termStats);
+      this.stats = similarity.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override

@@ -83,16 +83,6 @@ public class TermQuery extends Query {
       return "weight(" + TermQuery.this + ")";
     }
 
-    @Override
-    public float getValueForNormalization() {
-      return stats.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      stats.normalize(queryNorm, boost);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       assert termStates.topReaderContext == ReaderUtil.getTopLevelContext(context) : "The top-reader used to create Weight (" + termStates.topReaderContext + ") is not the same as the current reader's top-reader (" + ReaderUtil.getTopLevelContext(context);

@@ -173,7 +163,7 @@ public class TermQuery extends Query {
   }
 
   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final IndexReaderContext context = searcher.getTopReaderContext();
     final TermContext termState;
     if (perReaderTermState == null

@@ -186,7 +176,7 @@ public class TermQuery extends Query {
       termState = this.perReaderTermState;
     }
 
-    return new TermWeight(searcher, needsScores, termState);
+    return new TermWeight(searcher, needsScores, boost, termState);
   }
 
   /** Prints a user-readable version of this query. */
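TermWeight shows the leaf-weight pattern every scoring query in this commit follows. A hedged skeleton of that pattern (heavily simplified and left abstract so scorer/explain/extractTerms need not be stubbed; all names are illustrative):

```java
import java.io.IOException;

import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.similarities.Similarity;

// The boost flows into Similarity.computeWeight exactly once, at construction;
// no normalize() pass follows.
abstract class LeafWeightSketch extends Weight {
  final Similarity similarity;
  final Similarity.SimWeight stats;

  LeafWeightSketch(Query query, IndexSearcher searcher, float boost,
                   CollectionStatistics collectionStats,
                   TermStatistics termStats) throws IOException {
    super(query);
    this.similarity = searcher.getSimilarity(true);
    this.stats = similarity.computeWeight(boost, collectionStats, termStats);
  }
}
```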
@@ -24,7 +24,6 @@ import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.similarities.Similarity;
 import org.apache.lucene.util.Bits;
 
 /**

@@ -44,13 +43,7 @@ import org.apache.lucene.util.Bits;
  * A <code>Weight</code> is used in the following way:
  * <ol>
  * <li>A <code>Weight</code> is constructed by a top-level query, given a
- * <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean)}).
- * <li>The {@link #getValueForNormalization()} method is called on the
- * <code>Weight</code> to compute the query normalization factor
- * {@link Similarity#queryNorm(float)} of the query clauses contained in the
- * query.
- * <li>The query normalization factor is passed to {@link #normalize(float, float)}. At
- * this point the weighting is complete.
+ * <code>IndexSearcher</code> ({@link Query#createWeight(IndexSearcher, boolean, float)}).
 * <li>A <code>Scorer</code> is constructed by
 * {@link #scorer(org.apache.lucene.index.LeafReaderContext)}.
 * </ol>

@@ -90,12 +83,6 @@ public abstract class Weight {
   public final Query getQuery() {
     return parentQuery;
   }
 
-  /** The value for normalization of contained query clauses (e.g. sum of squared weights). */
-  public abstract float getValueForNormalization() throws IOException;
-
-  /** Assigns the query normalization factor and boost to this. */
-  public abstract void normalize(float norm, float boost);
-
   /**
    * Returns a {@link Scorer} which can iterate in order over all matching
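The slimmed-down lifecycle in the javadoc now reads: construct the Weight via createWeight, then ask it for one Scorer per segment. A hedged end-to-end sketch (assumes an open IndexReader; names and field values are invented):

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;

final class WeightLifecycleExample {
  // createWeight(query, needsScores, boost), then one scorer per leaf.
  // No normalization step sits between the two anymore.
  static void iterateMatches(IndexReader reader) throws IOException {
    IndexSearcher searcher = new IndexSearcher(reader);
    Query query = searcher.rewrite(new TermQuery(new Term("body", "lucene")));
    Weight weight = searcher.createWeight(query, true, 1f);
    for (LeafReaderContext leaf : reader.leaves()) {
      Scorer scorer = weight.scorer(leaf);
      if (scorer == null) {
        continue; // no matches in this segment
      }
      DocIdSetIterator it = scorer.iterator();
      for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
        float score = scorer.score(); // consume the score here
      }
    }
  }
}
```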
@@ -357,7 +357,7 @@
  * {@link org.apache.lucene.search.Query Query} class has several methods that are important for
  * derived classes:
  * <ol>
- *   <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher, boolean needsScores)} — A
+ *   <li>{@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean,float) createWeight(IndexSearcher searcher, boolean needsScores, float boost)} — A
  *       {@link org.apache.lucene.search.Weight Weight} is the internal representation of the
  *       Query, so each Query implementation must
  *       provide an implementation of Weight. See the subsection on <a

@@ -366,7 +366,7 @@
  *   <li>{@link org.apache.lucene.search.Query#rewrite(org.apache.lucene.index.IndexReader) rewrite(IndexReader reader)} — Rewrites queries into primitive queries. Primitive queries are:
  *       {@link org.apache.lucene.search.TermQuery TermQuery},
  *       {@link org.apache.lucene.search.BooleanQuery BooleanQuery}, <span
- *       >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher, boolean needsScores)}</span></li>
+ *       >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean,float) createWeight(IndexSearcher searcher, boolean needsScores, float boost)}</span></li>
 * </ol>
 * <a name="weightClass"></a>
 * <h3>The Weight Interface</h3>

@@ -381,28 +381,6 @@
  *       {@link org.apache.lucene.search.Weight#getQuery getQuery()} — Pointer to the
  *       Query that this Weight represents.</li>
- *   <li>
- *       {@link org.apache.lucene.search.Weight#getValueForNormalization() getValueForNormalization()} —
- *       A weight can return a floating point value to indicate its magnitude for query normalization. Typically
- *       a weight such as TermWeight that scores via a {@link org.apache.lucene.search.similarities.Similarity Similarity}
- *       will just defer to the Similarity's implementation:
- *       {@link org.apache.lucene.search.similarities.Similarity.SimWeight#getValueForNormalization SimWeight#getValueForNormalization()}.
- *       For example, with {@link org.apache.lucene.search.similarities.TFIDFSimilarity Lucene's classic vector-space formula}, this
- *       is implemented as the sum of squared weights: <code>(idf * boost)<sup>2</sup></code></li>
- *   <li>
- *       {@link org.apache.lucene.search.Weight#normalize(float,float) normalize(float norm, float boost)} —
- *       Performs query normalization:
- *       <ul>
- *         <li><code>boost</code>: A query-boost factor from any wrapping queries that should be multiplied into every
- *         document's score. For example, a TermQuery that is wrapped within a BooleanQuery with a boost of <code>5</code> would
- *         receive this value at this time. This allows the TermQuery (the leaf node in this case) to compute this up-front
- *         a single time (e.g. by multiplying into the IDF), rather than for every document.</li>
- *         <li><code>norm</code>: Passes in a normalization factor which may
- *         allow for comparing scores between queries.</li>
- *       </ul>
- *       Typically a weight such as TermWeight
- *       that scores via a {@link org.apache.lucene.search.similarities.Similarity Similarity} will just defer to the Similarity's implementation:
- *       {@link org.apache.lucene.search.similarities.Similarity.SimWeight#normalize SimWeight#normalize(float,float)}.</li>
  *   <li>
  *       {@link org.apache.lucene.search.Weight#scorer scorer()} —
  *       Construct a new {@link org.apache.lucene.search.Scorer Scorer} for this Weight. See <a href="#scorerClass">The Scorer Class</a>
  *       below for help defining a Scorer. As the name implies, the Scorer is responsible for doing the actual scoring of documents
@@ -205,7 +205,7 @@ public class BM25Similarity extends Similarity {
   }
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     Explanation idf = termStats.length == 1 ? idfExplain(collectionStats, termStats[0]) : idfExplain(collectionStats, termStats);
 
     float avgdl = avgFieldLength(collectionStats);

@@ -215,7 +215,7 @@ public class BM25Similarity extends Similarity {
     for (int i = 0; i < cache.length; i++) {
       cache[i] = k1 * ((1 - b) + b * decodeNormValue((byte)i) / avgdl);
     }
-    return new BM25Stats(collectionStats.field(), idf, avgdl, cache);
+    return new BM25Stats(collectionStats.field(), boost, idf, avgdl, cache);
   }
 
   @Override

@@ -267,34 +267,23 @@ public class BM25Similarity extends Similarity {
     /** The average document length. */
     private final float avgdl;
     /** query boost */
-    private float boost;
+    private final float boost;
     /** weight (idf * boost) */
-    private float weight;
+    private final float weight;
     /** field name, for pulling norms */
     private final String field;
     /** precomputed norm[256] with k1 * ((1 - b) + b * dl / avgdl) */
     private final float cache[];
 
-    BM25Stats(String field, Explanation idf, float avgdl, float cache[]) {
+    BM25Stats(String field, float boost, Explanation idf, float avgdl, float cache[]) {
       this.field = field;
+      this.boost = boost;
       this.idf = idf;
       this.avgdl = avgdl;
       this.cache = cache;
-      normalize(1f, 1f);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      // we return a TF-IDF like normalization to be nice, but we don't actually normalize ourselves.
-      return weight * weight;
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      // we don't normalize with queryNorm at all, we just capture the top-level boost
-      this.boost = boost;
-      this.weight = idf.getValue() * boost;
+      this.weight = idf.getValue() * boost;
     }
   }
 
 }
 
 private Explanation explainTFNorm(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
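BM25 never used queryNorm anyway, so for it the change is pure simplification: BM25Stats is now immutable and its weight is fixed at construction. Illustrative arithmetic, with invented values:

```java
// Values invented for illustration; what the BM25Stats constructor now fixes.
class Bm25WeightArithmetic {
  static float weightFor(float idf, float boost) {
    // e.g. idf = 2.2f from idfExplain(...), boost = 6.0f from nested
    // BoostQuery wrappers, giving weight = 13.2f. No later normalize()
    // call can change it.
    return idf * boost;
  }
}
```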
@@ -38,15 +38,13 @@ public class BasicStats extends Similarity.SimWeight {
 
   // -------------------------- Boost-related stuff --------------------------
 
-  /** For most Similarities, the immediate and the top level query boosts are
-   * not handled differently. Hence, this field is just the product of the
-   * other two. */
-  protected float boost;
+  /** A query boost. Should be applied as a multiplicative factor to the score. */
+  protected final float boost;
 
   /** Constructor. */
-  public BasicStats(String field) {
+  public BasicStats(String field, float boost) {
     this.field = field;
-    normalize(1f, 1f);
+    this.boost = boost;
   }
 
   // ------------------------- Getter/setter methods -------------------------

@@ -107,31 +105,6 @@ public class BasicStats extends Similarity.SimWeight {
     this.totalTermFreq = totalTermFreq;
   }
 
-  // -------------------------- Boost-related stuff --------------------------
-
-  /** The square of the raw normalization value.
-   * @see #rawNormalizationValue() */
-  @Override
-  public float getValueForNormalization() {
-    float rawValue = rawNormalizationValue();
-    return rawValue * rawValue;
-  }
-
-  /** Computes the raw normalization value. This basic implementation returns
-   * the query boost. Subclasses may override this method to include other
-   * factors (such as idf), or to save the value for inclusion in
-   * {@link #normalize(float, float)}, etc.
-   */
-  protected float rawNormalizationValue() {
-    return boost;
-  }
-
-  /** No normalization is done. {@code boost} is saved in the object, however. */
-  @Override
-  public void normalize(float queryNorm, float boost) {
-    this.boost = boost;
-  }
-
   /** Returns the total boost. */
   public float getBoost() {
     return boost;
@@ -55,12 +55,6 @@ public class ClassicSimilarity extends TFIDFSimilarity {
 
   /** Sole constructor: parameter-free */
   public ClassicSimilarity() {}
 
-  /** Implemented as <code>1/sqrt(sumOfSquaredWeights)</code>. */
-  @Override
-  public float queryNorm(float sumOfSquaredWeights) {
-    return (float)(1.0 / Math.sqrt(sumOfSquaredWeights));
-  }
-
   /**
    * Encodes a normalization factor for storage in an index.
@@ -54,8 +54,8 @@ public abstract class LMSimilarity extends SimilarityBase {
   }
 
   @Override
-  protected BasicStats newStats(String field) {
-    return new LMStats(field);
+  protected BasicStats newStats(String field, float boost) {
+    return new LMStats(field, boost);
   }
 
   /**

@@ -108,8 +108,8 @@ public abstract class LMSimilarity extends SimilarityBase {
     /**
      * Creates LMStats for the provided field and query-time boost
      */
-    public LMStats(String field) {
-      super(field);
+    public LMStats(String field, float boost) {
+      super(field, boost);
     }
 
     /**
@@ -50,10 +50,10 @@ public class MultiSimilarity extends Similarity {
   }
 
   @Override
-  public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     SimWeight subStats[] = new SimWeight[sims.length];
     for (int i = 0; i < subStats.length; i++) {
-      subStats[i] = sims[i].computeWeight(collectionStats, termStats);
+      subStats[i] = sims[i].computeWeight(boost, collectionStats, termStats);
     }
     return new MultiStats(subStats);
   }

@@ -109,21 +109,5 @@ public class MultiSimilarity extends Similarity {
     MultiStats(SimWeight subStats[]) {
       this.subStats = subStats;
     }
-
-    @Override
-    public float getValueForNormalization() {
-      float sum = 0.0f;
-      for (SimWeight stat : subStats) {
-        sum += stat.getValueForNormalization();
-      }
-      return sum / subStats.length;
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      for (SimWeight stat : subStats) {
-        stat.normalize(queryNorm, boost);
-      }
-    }
   }
 }
@@ -46,10 +46,10 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
   }
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     PerFieldSimWeight weight = new PerFieldSimWeight();
     weight.delegate = get(collectionStats.field());
-    weight.delegateWeight = weight.delegate.computeWeight(collectionStats, termStats);
+    weight.delegateWeight = weight.delegate.computeWeight(boost, collectionStats, termStats);
     return weight;
   }
 

@@ -67,15 +67,5 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
   static class PerFieldSimWeight extends SimWeight {
     Similarity delegate;
     SimWeight delegateWeight;
-
-    @Override
-    public float getValueForNormalization() {
-      return delegateWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
-      delegateWeight.normalize(queryNorm, boost);
-    }
   }
 }
@@ -19,7 +19,6 @@ package org.apache.lucene.search.similarities;
 
 import org.apache.lucene.index.FieldInvertState;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;

@@ -78,16 +77,12 @@ import java.util.Collections;
  * <a name="querytime">Query time</a>
  * At query-time, Queries interact with the Similarity via these steps:
  * <ol>
- *   <li>The {@link #computeWeight(CollectionStatistics, TermStatistics...)} method is called a single time,
+ *   <li>The {@link #computeWeight(float, CollectionStatistics, TermStatistics...)} method is called a single time,
  *       allowing the implementation to compute any statistics (such as IDF, average document length, etc)
  *       across <i>the entire collection</i>. The {@link TermStatistics} and {@link CollectionStatistics} passed in
  *       already contain all of the raw statistics involved, so a Similarity can freely use any combination
  *       of statistics without causing any additional I/O. Lucene makes no assumption about what is
  *       stored in the returned {@link Similarity.SimWeight} object.
- *   <li>The query normalization process occurs a single time: {@link Similarity.SimWeight#getValueForNormalization()}
- *       is called for each query leaf node, {@link Similarity#queryNorm(float)} is called for the top-level
- *       query, and finally {@link Similarity.SimWeight#normalize(float, float)} passes down the normalization value
- *       and any top-level boosts (e.g. from enclosing {@link BooleanQuery}s).
  *   <li>For each segment in the index, the Query creates a {@link #simScorer(SimWeight, org.apache.lucene.index.LeafReaderContext)}
  *       The score() method is called for each matching document.
  * </ol>

@@ -109,23 +104,6 @@ public abstract class Similarity {
    */
   public Similarity() {}
 
-  /** Computes the normalization value for a query given the sum of the
-   * normalized weights {@link SimWeight#getValueForNormalization()} of
-   * each of the query terms. This value is passed back to the
-   * weight ({@link SimWeight#normalize(float, float)} of each query
-   * term, to provide a hook to attempt to make scores from different
-   * queries comparable.
-   * <p>
-   * By default this is disabled (returns <code>1</code>), but some
-   * implementations such as {@link TFIDFSimilarity} override this.
-   *
-   * @param valueForNormalization the sum of the term normalization values
-   * @return a normalization factor for query weights
-   */
-  public float queryNorm(float valueForNormalization) {
-    return 1f;
-  }
-
   /**
    * Computes the normalization value for a field, given the accumulated
    * state of term processing for this field (see {@link FieldInvertState}).

@@ -144,15 +122,17 @@ public abstract class Similarity {
   /**
    * Compute any collection-level weight (e.g. IDF, average document length, etc) needed for scoring a query.
    *
+   * @param boost a multiplicative factor to apply to the produced scores
    * @param collectionStats collection-level statistics, such as the number of tokens in the collection.
   * @param termStats term-level statistics, such as the document frequency of a term across the collection.
   * @return SimWeight object with the information this Similarity needs to score a query.
   */
-  public abstract SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats);
+  public abstract SimWeight computeWeight(float boost,
+      CollectionStatistics collectionStats, TermStatistics... termStats);
 
   /**
    * Creates a new {@link Similarity.SimScorer} to score matching documents from a segment of the inverted index.
-   * @param weight collection information from {@link #computeWeight(CollectionStatistics, TermStatistics...)}
+   * @param weight collection information from {@link #computeWeight(float, CollectionStatistics, TermStatistics...)}
   * @param context segment of the inverted index to be scored.
   * @return SimScorer for scoring documents across <code>context</code>
   * @throws IOException if there is a low-level I/O error

@@ -215,24 +195,6 @@ public abstract class Similarity {
      * constructors, typically implicit.)
      */
     public SimWeight() {}
 
-    /** The value for normalization of contained query clauses (e.g. sum of squared weights).
-     * <p>
-     * NOTE: a Similarity implementation might not use any query normalization at all,
-     * it's not required. However, if it wants to participate in query normalization,
-     * it can return a value here.
-     */
-    public abstract float getValueForNormalization();
-
-    /** Assigns the query normalization factor and boost from parent queries to this.
-     * <p>
-     * NOTE: a Similarity implementation might not use this normalized value at all,
-     * it's not required. However, it's usually a good idea to at least incorporate
-     * the boost into its score.
-     * <p>
-     * NOTE: If this method is called several times, it behaves as if only the
-     * last call was performed.
-     */
-    public abstract void normalize(float queryNorm, float boost);
-
   }
 }
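Since SimWeight is now a plain state holder with no abstract methods, a custom Similarity simply captures the boost in computeWeight and reads it back when scoring. A minimal hedged sketch (the class scores every match by the boost alone; all names invented):

```java
import java.io.IOException;

import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.BytesRef;

public class BoostOnlySimilarity extends Similarity {

  private static class BoostWeight extends SimWeight {
    final float boost;
    BoostWeight(float boost) { this.boost = boost; }
  }

  @Override
  public long computeNorm(FieldInvertState state) {
    return 1L; // no length normalization in this sketch
  }

  @Override
  public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
    return new BoostWeight(boost); // capture the boost; nothing else needed
  }

  @Override
  public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException {
    final float boost = ((BoostWeight) weight).boost;
    return new SimScorer() {
      @Override
      public float score(int doc, float freq) { return boost; }
      @Override
      public float computeSlopFactor(int distance) { return 1f; }
      @Override
      public float computePayloadFactor(int doc, int start, int end, BytesRef payload) { return 1f; }
    };
  }
}
```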
@@ -83,18 +83,18 @@ public abstract class SimilarityBase extends Similarity {
   }
 
   @Override
-  public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     BasicStats stats[] = new BasicStats[termStats.length];
     for (int i = 0; i < termStats.length; i++) {
-      stats[i] = newStats(collectionStats.field());
+      stats[i] = newStats(collectionStats.field(), boost);
       fillBasicStats(stats[i], collectionStats, termStats[i]);
     }
     return stats.length == 1 ? stats[0] : new MultiSimilarity.MultiStats(stats);
   }
 
   /** Factory method to return a custom stats object */
-  protected BasicStats newStats(String field) {
-    return new BasicStats(field);
+  protected BasicStats newStats(String field, float boost) {
+    return new BasicStats(field, boost);
   }
 
   /** Fills all member fields defined in {@code BasicStats} in {@code stats}.
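Subclasses of SimilarityBase get this plumbing for free: computeWeight(boost, ...) and newStats(field, boost) store the boost on BasicStats, and implementations just read it back. A hedged sketch (the RawTfSimilarity class and its scoring function are invented):

```java
import org.apache.lucene.search.similarities.BasicStats;
import org.apache.lucene.search.similarities.SimilarityBase;

// Illustrative subclass: score is raw term frequency times the query boost.
public class RawTfSimilarity extends SimilarityBase {
  @Override
  protected float score(BasicStats stats, float freq, float docLen) {
    return stats.getBoost() * freq; // the boost arrives via the stats object
  }

  @Override
  public String toString() {
    return "RawTfSimilarity";
  }
}
```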
@ -262,9 +262,6 @@ import org.apache.lucene.util.BytesRef;
|
|||
* <tr>
|
||||
* <td valign="middle" align="right" rowspan="1">
|
||||
* score(q,d) =
|
||||
* <A HREF="#formula_queryNorm"><span style="color: #FF33CC">queryNorm(q)</span></A> ·
|
||||
* </td>
|
||||
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
|
||||
* <big><big><big>∑</big></big></big>
|
||||
* </td>
|
||||
* <td valign="middle" align="right" rowspan="1">
|
||||
|
@ -354,72 +351,6 @@ import org.apache.lucene.util.BytesRef;
|
|||
* <br> <br>
|
||||
* </li>
|
||||
*
|
||||
* <li><b>
|
||||
* <A NAME="formula_queryNorm"></A>
|
||||
* <i>queryNorm(q)</i>
|
||||
* </b>
|
||||
* is a normalizing factor used to make scores between queries comparable.
|
||||
* This factor does not affect document ranking (since all ranked documents are multiplied by the same factor),
|
||||
* but rather just attempts to make scores from different queries (or even different indexes) comparable.
|
||||
* This is a search time factor computed by the Similarity in effect at search time.
|
||||
*
|
||||
* The default computation in
|
||||
* {@link org.apache.lucene.search.similarities.ClassicSimilarity#queryNorm(float) ClassicSimilarity}
|
||||
* produces a <a href="http://en.wikipedia.org/wiki/Euclidean_norm#Euclidean_norm">Euclidean norm</a>:
|
||||
* <br> <br>
|
||||
* <table cellpadding="1" cellspacing="0" border="0" style="width:auto; margin-left:auto; margin-right:auto" summary="query normalization computation">
|
||||
* <tr>
|
||||
* <td valign="middle" align="right" rowspan="1">
|
||||
* queryNorm(q) =
|
||||
* {@link org.apache.lucene.search.similarities.ClassicSimilarity#queryNorm(float) queryNorm(sumOfSquaredWeights)}
|
||||
* =
|
||||
* </td>
|
||||
* <td valign="middle" align="center" rowspan="1">
|
||||
* <table summary="query normalization computation">
|
||||
* <tr><td align="center" style="text-align: center"><big>1</big></td></tr>
|
||||
* <tr><td align="center" style="text-align: center"><big>
|
||||
* ––––––––––––––
|
||||
* </big></td></tr>
|
||||
* <tr><td align="center" style="text-align: center">sumOfSquaredWeights<sup><big>½</big></sup></td></tr>
|
||||
* </table>
|
||||
* </td>
|
||||
* </tr>
|
||||
* </table>
|
||||
* <br> <br>
|
||||
*
|
||||
* The sum of squared weights (of the query terms) is
|
||||
* computed by the query {@link org.apache.lucene.search.Weight} object.
|
||||
* For example, a {@link org.apache.lucene.search.BooleanQuery}
|
||||
* computes this value as:
|
||||
*
|
||||
* <br> <br>
|
||||
* <table cellpadding="1" cellspacing="0" border="0" style="width:auto; margin-left:auto; margin-right:auto" summary="sum of squared weights computation">
|
||||
* <tr>
|
||||
* <td valign="middle" align="right" rowspan="1">
|
||||
* {@link org.apache.lucene.search.Weight#getValueForNormalization() sumOfSquaredWeights} =
|
||||
* {@link org.apache.lucene.search.BoostQuery#getBoost() q.getBoost()} <sup><big>2</big></sup>
|
||||
* ·
|
||||
* </td>
|
||||
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
|
||||
* <big><big><big>∑</big></big></big>
|
||||
* </td>
|
||||
* <td valign="middle" align="right" rowspan="1">
|
||||
* <big><big>(</big></big>
|
||||
* <A HREF="#formula_idf">idf(t)</A> ·
|
||||
* <A HREF="#formula_termBoost">t.getBoost()</A>
|
||||
* <big><big>) <sup>2</sup> </big></big>
|
||||
* </td>
|
||||
* </tr>
|
||||
* <tr valign="top">
|
||||
* <td></td>
|
||||
* <td align="center" style="text-align: center"><small>t in q</small></td>
|
||||
* <td></td>
|
||||
* </tr>
|
||||
* </table>
|
||||
* <br> <br>
|
||||
*
|
||||
* </li>
|
||||
*
|
||||
* <li>
|
||||
* <A NAME="formula_termBoost"></A>
|
||||
* <b><i>t.getBoost()</i></b>
|
||||
|
@ -495,22 +426,6 @@ public abstract class TFIDFSimilarity extends Similarity {
|
|||
*/
|
||||
public TFIDFSimilarity() {}
|
||||
|
||||
/** Computes the normalization value for a query given the sum of the squared
|
||||
* weights of each of the query terms. This value is multiplied into the
|
||||
* weight of each query term. While the classic query normalization factor is
|
||||
* computed as 1/sqrt(sumOfSquaredWeights), other implementations might
|
||||
* completely ignore sumOfSquaredWeights (ie return 1).
|
||||
*
|
||||
* <p>This does not affect ranking, but the default implementation does make scores
|
||||
* from different queries more comparable than they would be by eliminating the
|
||||
* magnitude of the Query vector as a factor in the score.
|
||||
*
|
||||
* @param sumOfSquaredWeights the sum of the squares of query term weights
|
||||
* @return a normalization factor for query weights
|
||||
*/
|
||||
@Override
|
||||
public abstract float queryNorm(float sumOfSquaredWeights);
|
||||
|
||||
/** Computes a score factor based on a term or phrase's frequency in a
|
||||
* document. This value is multiplied by the {@link #idf(long, long)}
|
||||
* factor for each term in the query and these products are then summed to
|
||||
|
@ -652,11 +567,11 @@ public abstract class TFIDFSimilarity extends Similarity {
|
|||
public abstract float scorePayload(int doc, int start, int end, BytesRef payload);
|
||||
|
||||
@Override
|
||||
public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
|
||||
public final SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
|
||||
final Explanation idf = termStats.length == 1
|
||||
? idfExplain(collectionStats, termStats[0])
|
||||
: idfExplain(collectionStats, termStats);
|
||||
return new IDFStats(collectionStats.field(), idf);
|
||||
return new IDFStats(collectionStats.field(), boost, idf);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -672,7 +587,7 @@ public abstract class TFIDFSimilarity extends Similarity {
|
|||
|
||||
TFIDFSimScorer(IDFStats stats, NumericDocValues norms) throws IOException {
|
||||
this.stats = stats;
|
||||
this.weightValue = stats.value;
|
||||
this.weightValue = stats.queryWeight;
|
||||
this.norms = norms;
|
||||
}
|
||||
|
||||
|
@@ -705,49 +620,18 @@ public abstract class TFIDFSimilarity extends Similarity {
     private final String field;
     /** The idf and its explanation */
     private final Explanation idf;
-    private float queryNorm;
-    private float boost;
-    private float queryWeight;
-    private float value;
+    private final float boost;
+    private final float queryWeight;
 
-    public IDFStats(String field, Explanation idf) {
+    public IDFStats(String field, float boost, Explanation idf) {
       // TODO: Validate?
       this.field = field;
       this.idf = idf;
-      normalize(1f, 1f);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      // TODO: (sorta LUCENE-1907) make non-static class and expose this squaring via a nice method to subclasses?
-      return queryWeight * queryWeight;  // sum of squared weights
-    }
-
-    @Override
-    public void normalize(float queryNorm, float boost) {
       this.boost = boost;
-      this.queryNorm = queryNorm;
-      queryWeight = queryNorm * boost * idf.getValue();
-      value = queryWeight * idf.getValue();         // idf for document
+      this.queryWeight = boost * idf.getValue();
     }
   }
 
-  private Explanation explainQuery(IDFStats stats) {
-    List<Explanation> subs = new ArrayList<>();
-
-    Explanation boostExpl = Explanation.match(stats.boost, "boost");
-    if (stats.boost != 1.0f)
-      subs.add(boostExpl);
-    subs.add(stats.idf);
-
-    Explanation queryNormExpl = Explanation.match(stats.queryNorm,"queryNorm");
-    subs.add(queryNormExpl);
-
-    return Explanation.match(
-        boostExpl.getValue() * stats.idf.getValue() * queryNormExpl.getValue(),
-        "queryWeight, product of:", subs);
-  }
-
   private Explanation explainField(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
     Explanation tfExplanation = Explanation.match(tf(freq.getValue()), "tf(freq="+freq.getValue()+"), with freq of:", freq);
     Explanation fieldNormExpl = Explanation.match(
@@ -761,9 +645,9 @@ public abstract class TFIDFSimilarity extends Similarity {
   }
 
   private Explanation explainScore(int doc, Explanation freq, IDFStats stats, NumericDocValues norms) {
-    Explanation queryExpl = explainQuery(stats);
+    Explanation queryExpl = Explanation.match(stats.boost, "boost");
     Explanation fieldExpl = explainField(doc, freq, stats, norms);
-    if (queryExpl.getValue() == 1f) {
+    if (stats.boost == 1f) {
       return fieldExpl;
     }
     return Explanation.match(
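Taken together, the TFIDFSimilarity hunks above replace the old two-pass protocol (sum up squared weights, then normalize) with a single multiplication at construction time. A minimal sketch of the resulting invariant, using a hypothetical ToyIDFStats class and an idfValue placeholder rather than the real Lucene types:

    // Hypothetical stand-in for IDFStats after the change: the query weight is
    // final and computed once, with no later normalize() pass to re-scale it.
    final class ToyIDFStats {
      final float boost;
      final float queryWeight; // boost * idf, fixed at construction

      ToyIDFStats(float boost, float idfValue) {
        this.boost = boost;
        this.queryWeight = boost * idfValue;
      }

      public static void main(String[] args) {
        ToyIDFStats stats = new ToyIDFStats(2.0f, 1.5f);
        // Prints 3.0; the old code computed queryNorm * boost * idf instead,
        // where queryNorm came from getValueForNormalization().
        System.out.println(stats.queryWeight);
      }
    }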
@@ -89,8 +89,8 @@ public final class FieldMaskingSpanQuery extends SpanQuery {
   // ...this is done to be more consistent with things like SpanFirstQuery
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return maskedQuery.createWeight(searcher, needsScores);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return maskedQuery.createWeight(searcher, needsScores, boost);
   }
 
   @Override
@@ -18,17 +18,10 @@ package org.apache.lucene.search.spans;
 
 import java.io.IOException;
-import java.util.Map;
 import java.util.Objects;
-import java.util.Set;
-import java.util.TreeMap;
 
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermContext;
 import org.apache.lucene.search.BoostQuery;
-import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 
@@ -115,51 +108,8 @@ public final class SpanBoostQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final SpanWeight weight = query.createWeight(searcher, needsScores);
-    if (needsScores == false) {
-      return weight;
-    }
-    Map<Term, TermContext> terms = new TreeMap<>();
-    weight.extractTermContexts(terms);
-    weight.normalize(1f, boost);
-    return new SpanWeight(this, searcher, terms) {
-
-      @Override
-      public void extractTerms(Set<Term> terms) {
-        weight.extractTerms(terms);
-      }
-
-      @Override
-      public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-        return weight.explain(context, doc);
-      }
-
-      @Override
-      public float getValueForNormalization() throws IOException {
-        return weight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        weight.normalize(norm, SpanBoostQuery.this.boost * boost);
-      }
-
-      @Override
-      public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) throws IOException {
-        return weight.getSpans(ctx, requiredPostings);
-      }
-
-      @Override
-      public SpanScorer scorer(LeafReaderContext context) throws IOException {
-        return weight.scorer(context);
-      }
-
-      @Override
-      public void extractTermContexts(Map<Term,TermContext> contexts) {
-        weight.extractTermContexts(contexts);
-      }
-    };
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return query.createWeight(searcher, needsScores, SpanBoostQuery.this.boost * boost);
   }
 
 }
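The SpanBoostQuery rewrite above is the clearest statement of the new contract: a boosting query no longer wraps and re-normalizes the inner Weight, it just multiplies its own boost into the value it passes down. A sketch of that composition rule with hypothetical toy types (the real signature also carries the searcher and needsScores):

    // Hypothetical toy types; only the multiplication rule mirrors the commit.
    interface ToyQuery {
      ToyWeight createWeight(float boost);
    }

    final class ToyWeight {
      final float boost;
      ToyWeight(float boost) { this.boost = boost; }
    }

    final class ToyBoostQuery implements ToyQuery {
      private final ToyQuery inner;
      private final float myBoost;

      ToyBoostQuery(ToyQuery inner, float myBoost) {
        this.inner = inner;
        this.myBoost = myBoost;
      }

      @Override
      public ToyWeight createWeight(float boost) {
        // Boosts compose multiplicatively on the way down the query tree.
        return inner.createWeight(myBoost * boost);
      }
    }

Nesting two wrappers with boosts 2f and 5f and calling createWeight(1f) on the outer one hands the leaf a boost of 10f, which matches what the removed normalize() chain used to compute.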
@@ -62,8 +62,8 @@ abstract class SpanContainQuery extends SpanQuery implements Cloneable {
     final SpanWeight littleWeight;
 
     public SpanContainWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                             SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(SpanContainQuery.this, searcher, terms);
+                             SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(SpanContainQuery.this, searcher, terms, boost);
       this.bigWeight = bigWeight;
       this.littleWeight = littleWeight;
     }
@@ -43,18 +43,18 @@ public final class SpanContainingQuery extends SpanContainQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight bigWeight = big.createWeight(searcher, false);
-    SpanWeight littleWeight = little.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight bigWeight = big.createWeight(searcher, false, boost);
+    SpanWeight littleWeight = little.createWeight(searcher, false, boost);
     return new SpanContainingWeight(searcher, needsScores ? getTermContexts(bigWeight, littleWeight) : null,
-                                    bigWeight, littleWeight);
+                                    bigWeight, littleWeight, boost);
   }
 
   public class SpanContainingWeight extends SpanContainWeight {
 
     public SpanContainingWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                                SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(searcher, terms, bigWeight, littleWeight);
+                                SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(searcher, terms, bigWeight, littleWeight, boost);
     }
 
     /**
@@ -95,7 +95,7 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     throw new IllegalArgumentException("Rewrite first!");
   }
 
@@ -176,20 +176,20 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     List<SpanWeight> subWeights = new ArrayList<>();
     for (SpanQuery q : clauses) {
-      subWeights.add(q.createWeight(searcher, false));
+      subWeights.add(q.createWeight(searcher, false, boost));
     }
-    return new SpanNearWeight(subWeights, searcher, needsScores ? getTermContexts(subWeights) : null);
+    return new SpanNearWeight(subWeights, searcher, needsScores ? getTermContexts(subWeights) : null, boost);
   }
 
   public class SpanNearWeight extends SpanWeight {
 
     final List<SpanWeight> subWeights;
 
-    public SpanNearWeight(List<SpanWeight> subWeights, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanNearQuery.this, searcher, terms);
+    public SpanNearWeight(List<SpanWeight> subWeights, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanNearQuery.this, searcher, terms, boost);
       this.subWeights = subWeights;
     }
 
@@ -295,14 +295,14 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new SpanGapWeight(searcher);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new SpanGapWeight(searcher, boost);
   }
 
   private class SpanGapWeight extends SpanWeight {
 
-    SpanGapWeight(IndexSearcher searcher) throws IOException {
-      super(SpanGapQuery.this, searcher, null);
+    SpanGapWeight(IndexSearcher searcher, float boost) throws IOException {
+      super(SpanGapQuery.this, searcher, null, boost);
     }
 
     @Override
@@ -93,11 +93,11 @@ public final class SpanNotQuery extends SpanQuery {
 
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight includeWeight = include.createWeight(searcher, false);
-    SpanWeight excludeWeight = exclude.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight includeWeight = include.createWeight(searcher, false, boost);
+    SpanWeight excludeWeight = exclude.createWeight(searcher, false, boost);
     return new SpanNotWeight(searcher, needsScores ? getTermContexts(includeWeight, excludeWeight) : null,
-                             includeWeight, excludeWeight);
+                             includeWeight, excludeWeight, boost);
   }
 
   public class SpanNotWeight extends SpanWeight {
@@ -106,8 +106,8 @@ public final class SpanNotQuery extends SpanQuery {
     final SpanWeight excludeWeight;
 
     public SpanNotWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                         SpanWeight includeWeight, SpanWeight excludeWeight) throws IOException {
-      super(SpanNotQuery.this, searcher, terms);
+                         SpanWeight includeWeight, SpanWeight excludeWeight, float boost) throws IOException {
+      super(SpanNotQuery.this, searcher, terms, boost);
       this.includeWeight = includeWeight;
       this.excludeWeight = excludeWeight;
     }
@@ -114,20 +114,20 @@ public final class SpanOrQuery extends SpanQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     List<SpanWeight> subWeights = new ArrayList<>(clauses.size());
     for (SpanQuery q : clauses) {
-      subWeights.add(q.createWeight(searcher, false));
+      subWeights.add(q.createWeight(searcher, false, boost));
     }
-    return new SpanOrWeight(searcher, needsScores ? getTermContexts(subWeights) : null, subWeights);
+    return new SpanOrWeight(searcher, needsScores ? getTermContexts(subWeights) : null, subWeights, boost);
   }
 
   public class SpanOrWeight extends SpanWeight {
 
     final List<SpanWeight> subWeights;
 
-    public SpanOrWeight(IndexSearcher searcher, Map<Term, TermContext> terms, List<SpanWeight> subWeights) throws IOException {
-      super(SpanOrQuery.this, searcher, terms);
+    public SpanOrWeight(IndexSearcher searcher, Map<Term, TermContext> terms, List<SpanWeight> subWeights, float boost) throws IOException {
+      super(SpanOrQuery.this, searcher, terms, boost);
       this.subWeights = subWeights;
     }
 
@@ -67,17 +67,17 @@ public abstract class SpanPositionCheckQuery extends SpanQuery implements Clonea
   protected abstract AcceptStatus acceptPosition(Spans spans) throws IOException;
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight matchWeight = match.createWeight(searcher, false);
-    return new SpanPositionCheckWeight(matchWeight, searcher, needsScores ? getTermContexts(matchWeight) : null);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight matchWeight = match.createWeight(searcher, false, boost);
+    return new SpanPositionCheckWeight(matchWeight, searcher, needsScores ? getTermContexts(matchWeight) : null, boost);
   }
 
   public class SpanPositionCheckWeight extends SpanWeight {
 
     final SpanWeight matchWeight;
 
-    public SpanPositionCheckWeight(SpanWeight matchWeight, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanPositionCheckQuery.this, searcher, terms);
+    public SpanPositionCheckWeight(SpanWeight matchWeight, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanPositionCheckQuery.this, searcher, terms, boost);
       this.matchWeight = matchWeight;
     }
 
@@ -36,7 +36,7 @@ public abstract class SpanQuery extends Query {
   public abstract String getField();
 
   @Override
-  public abstract SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException;
+  public abstract SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException;
 
   /**
    * Build a map of terms to termcontexts, for use in constructing SpanWeights
@@ -64,7 +64,7 @@ public class SpanTermQuery extends SpanQuery {
   public String getField() { return term.field(); }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     final TermContext context;
     final IndexReaderContext topContext = searcher.getTopReaderContext();
     if (termContext == null || termContext.topReaderContext != topContext) {
@@ -73,15 +73,15 @@ public class SpanTermQuery extends SpanQuery {
     else {
       context = termContext;
     }
-    return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null);
+    return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null, boost);
   }
 
   public class SpanTermWeight extends SpanWeight {
 
     final TermContext termContext;
 
-    public SpanTermWeight(TermContext termContext, IndexSearcher searcher, Map<Term, TermContext> terms) throws IOException {
-      super(SpanTermQuery.this, searcher, terms);
+    public SpanTermWeight(TermContext termContext, IndexSearcher searcher, Map<Term, TermContext> terms, float boost) throws IOException {
+      super(SpanTermQuery.this, searcher, terms, boost);
       this.termContext = termContext;
       assert termContext != null : "TermContext must not be null";
     }
 
@@ -82,14 +82,14 @@ public abstract class SpanWeight extends Weight {
   * be null if scores are not required
   * @throws IOException on error
   */
-  public SpanWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts) throws IOException {
+  public SpanWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts, float boost) throws IOException {
     super(query);
     this.field = query.getField();
    this.similarity = searcher.getSimilarity(termContexts != null);
-    this.simWeight = buildSimWeight(query, searcher, termContexts);
+    this.simWeight = buildSimWeight(query, searcher, termContexts, boost);
   }
 
-  private Similarity.SimWeight buildSimWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts) throws IOException {
+  private Similarity.SimWeight buildSimWeight(SpanQuery query, IndexSearcher searcher, Map<Term, TermContext> termContexts, float boost) throws IOException {
     if (termContexts == null || termContexts.size() == 0 || query.getField() == null)
       return null;
     TermStatistics[] termStats = new TermStatistics[termContexts.size()];
@@ -99,7 +99,7 @@ public abstract class SpanWeight extends Weight {
       i++;
     }
     CollectionStatistics collectionStats = searcher.collectionStatistics(query.getField());
-    return searcher.getSimilarity(true).computeWeight(collectionStats, termStats);
+    return searcher.getSimilarity(true).computeWeight(boost, collectionStats, termStats);
   }
 
   /**
@@ -116,18 +116,6 @@ public abstract class SpanWeight extends Weight {
   */
   public abstract Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) throws IOException;
 
-  @Override
-  public float getValueForNormalization() throws IOException {
-    return simWeight == null ? 1.0f : simWeight.getValueForNormalization();
-  }
-
-  @Override
-  public void normalize(float queryNorm, float boost) {
-    if (simWeight != null) {
-      simWeight.normalize(queryNorm, boost);
-    }
-  }
-
   @Override
   public SpanScorer scorer(LeafReaderContext context) throws IOException {
     final Spans spans = getSpans(context, Postings.POSITIONS);
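With SpanWeight now forwarding the boost into Similarity#computeWeight, third-party Similarity subclasses need the same mechanical update as the test classes further down: add the leading float boost parameter and pass it along. A minimal sketch of a delegating wrapper under that assumption (the class name and the wrapped field are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.FieldInvertState;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.CollectionStatistics;
    import org.apache.lucene.search.TermStatistics;
    import org.apache.lucene.search.similarities.Similarity;

    public class ForwardingSimilarity extends Similarity {
      private final Similarity in;

      public ForwardingSimilarity(Similarity in) {
        this.in = in;
      }

      @Override
      public long computeNorm(FieldInvertState state) {
        return in.computeNorm(state);
      }

      @Override
      public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
        // The boost arrives here directly; there is no queryNorm to fold in anymore.
        return in.computeWeight(boost, collectionStats, termStats);
      }

      @Override
      public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException {
        return in.simScorer(weight, context);
      }
    }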
@@ -44,18 +44,18 @@ public final class SpanWithinQuery extends SpanContainQuery {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    SpanWeight bigWeight = big.createWeight(searcher, false);
-    SpanWeight littleWeight = little.createWeight(searcher, false);
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    SpanWeight bigWeight = big.createWeight(searcher, false, boost);
+    SpanWeight littleWeight = little.createWeight(searcher, false, boost);
     return new SpanWithinWeight(searcher, needsScores ? getTermContexts(bigWeight, littleWeight) : null,
-                                bigWeight, littleWeight);
+                                bigWeight, littleWeight, boost);
   }
 
   public class SpanWithinWeight extends SpanContainWeight {
 
     public SpanWithinWeight(IndexSearcher searcher, Map<Term, TermContext> terms,
-                            SpanWeight bigWeight, SpanWeight littleWeight) throws IOException {
-      super(searcher, terms, bigWeight, littleWeight);
+                            SpanWeight bigWeight, SpanWeight littleWeight, float boost) throws IOException {
+      super(searcher, terms, bigWeight, littleWeight, boost);
     }
 
     /**
@@ -82,11 +82,6 @@ public class TestCustomNorms extends LuceneTestCase {
   public class MySimProvider extends PerFieldSimilarityWrapper {
     Similarity delegate = new ClassicSimilarity();
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return delegate.queryNorm(sumOfSquaredWeights);
-    }
-
     @Override
     public Similarity get(String field) {
       if (floatTestField.equals(field)) {
@@ -105,7 +100,7 @@ public class TestCustomNorms extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
      throw new UnsupportedOperationException();
    }
 
@@ -992,8 +992,8 @@ public class TestIndexSorting extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return in.computeWeight(collectionStats, termStats);
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return in.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override
@@ -115,7 +115,6 @@ public class TestMaxTermFrequency extends LuceneTestCase {
       return norm;
     }
 
-    @Override public float queryNorm(float sumOfSquaredWeights) { return 0; }
     @Override public float tf(float freq) { return 0; }
     @Override public float idf(long docFreq, long docCount) { return 0; }
     @Override public float sloppyFreq(int distance) { return 0; }
@@ -64,7 +64,6 @@ public class TestNorms extends LuceneTestCase {
       return state.getLength();
     }
 
-    @Override public float queryNorm(float sumOfSquaredWeights) { return 0; }
     @Override public float tf(float freq) { return 0; }
     @Override public float idf(long docFreq, long docCount) { return 0; }
     @Override public float sloppyFreq(int distance) { return 0; }
@@ -155,12 +154,6 @@ public class TestNorms extends LuceneTestCase {
   public class MySimProvider extends PerFieldSimilarityWrapper {
     Similarity delegate = new ClassicSimilarity();
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-
-      return delegate.queryNorm(sumOfSquaredWeights);
-    }
-
     @Override
     public Similarity get(String field) {
       if (byteTestField.equals(field)) {
@@ -181,7 +174,7 @@ public class TestNorms extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException();
     }
 
@@ -46,8 +46,6 @@ public class TestOmitTf extends LuceneTestCase {
   public static class SimpleSimilarity extends TFIDFSimilarity {
     @Override public float decodeNormValue(long norm) { return norm; }
     @Override public long encodeNormValue(float f) { return (long) f; }
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }
@@ -105,7 +105,7 @@ public class TestUniqueTermCount extends LuceneTestCase {
   }
 
   @Override
-  public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+  public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
     throw new UnsupportedOperationException();
   }
 
@@ -194,7 +194,7 @@ final class JustCompileSearch {
   static final class JustCompileSimilarity extends Similarity {
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
@@ -257,16 +257,6 @@ final class JustCompileSearch {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
     }
 
-    @Override
-    public void normalize(float norm, float topLevelBoost) {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
-    @Override
-    public float getValueForNormalization() {
-      throw new UnsupportedOperationException(UNSUPPORTED_MSG);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) {
       throw new UnsupportedOperationException(UNSUPPORTED_MSG);
@@ -77,7 +77,7 @@ public class TestBooleanScorer extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       return new Weight(CrazyMustUseBulkScorerQuery.this) {
         @Override
         public void extractTerms(Set<Term> terms) {
@@ -89,15 +89,6 @@ public class TestBooleanScorer extends LuceneTestCase {
           throw new UnsupportedOperationException();
         }
 
-        @Override
-        public float getValueForNormalization() {
-          return 1.0f;
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-        }
-
         @Override
         public Scorer scorer(LeafReaderContext context) {
           throw new UnsupportedOperationException();
@@ -33,14 +33,11 @@ import org.apache.lucene.search.spans.*;
  */
 public class TestComplexExplanations extends BaseExplanationTestCase {
 
-  /**
-   * Override the Similarity used in our searcher with one that plays
-   * nice with boosts of 0.0
-   */
   @Override
   public void setUp() throws Exception {
     super.setUp();
-    searcher.setSimilarity(createQnorm1Similarity());
+    // TODO: switch to BM25?
+    searcher.setSimilarity(new ClassicSimilarity());
   }
 
   @Override
@@ -49,16 +46,6 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
     super.tearDown();
   }
 
-  // must be static for weight serialization tests
-  private static ClassicSimilarity createQnorm1Similarity() {
-    return new ClassicSimilarity() {
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return 1.0f; // / (float) Math.sqrt(1.0f + sumOfSquaredWeights);
-      }
-    };
-  }
-
 
   public void test1() throws Exception {
 
@@ -94,18 +94,9 @@ public class TestConjunctions extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(
+    public SimWeight computeWeight(float boost,
         CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return new SimWeight() {
-        @Override
-        public float getValueForNormalization() {
-          return 1; // we don't care
-        }
-        @Override
-        public void normalize(float queryNorm, float topLevelBoost) {
-          // we don't care
-        }
-      };
+      return new SimWeight() {};
     }
 
     @Override
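As the TestConjunctions hunk above shows, SimWeight lost its two abstract normalization methods, so a similarity with no per-query state can return an empty anonymous subclass. A sketch of a complete constant-scoring test similarity in that spirit (the class itself is hypothetical, not part of this commit):

    import java.io.IOException;

    import org.apache.lucene.index.FieldInvertState;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.CollectionStatistics;
    import org.apache.lucene.search.TermStatistics;
    import org.apache.lucene.search.similarities.Similarity;
    import org.apache.lucene.util.BytesRef;

    public class ConstantOneSimilarity extends Similarity {

      @Override
      public long computeNorm(FieldInvertState state) {
        return 1L; // norms are unused by this similarity
      }

      @Override
      public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
        // SimWeight is now just an opaque carrier between computeWeight and simScorer.
        return new SimWeight() {};
      }

      @Override
      public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException {
        return new SimScorer() {
          @Override
          public float score(int doc, float freq) {
            return 1f; // constant score regardless of term frequency
          }

          @Override
          public float computeSlopFactor(int distance) {
            return 1f;
          }

          @Override
          public float computePayloadFactor(int doc, int start, int end, BytesRef payload) {
            return 1f;
          }
        };
      }
    }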
@@ -30,7 +30,6 @@ import org.apache.lucene.index.MultiReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
@@ -100,14 +99,6 @@ public class TestConstantScoreQuery extends LuceneTestCase {
     searcher = newSearcher(reader, true, false);
     searcher.setQueryCache(null); // to assert on scorer impl
 
-    // set a similarity that does not normalize our boost away
-    searcher.setSimilarity(new ClassicSimilarity() {
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return 1.0f;
-      }
-    });
-
     final BoostQuery csq1 = new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term ("field", "term"))), 2f);
     final BoostQuery csq2 = new BoostQuery(new ConstantScoreQuery(csq1), 5f);
 
@@ -143,8 +134,8 @@ public class TestConstantScoreQuery extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return in.createWeight(searcher, needsScores, boost);
     }
 
     @Override
@@ -78,11 +78,6 @@ public class TestDocValuesScoring extends LuceneTestCase {
       public Similarity get(String field) {
         return "foo".equals(field) ? fooSim : base;
       }
-
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return base.queryNorm(sumOfSquaredWeights);
-      }
     });
 
     // in this case, we searched on field "foo". first document should have 2x the score.
@@ -148,8 +143,8 @@ public class TestDocValuesScoring extends LuceneTestCase {
     }
 
     @Override
-    public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return sim.computeWeight(collectionStats, termStats);
+    public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
+      return sim.computeWeight(boost, collectionStats, termStats);
     }
 
     @Override
@@ -347,8 +347,8 @@ public class TestLRUQueryCache extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           return null;
@@ -932,8 +932,8 @@ public class TestLRUQueryCache extends LuceneTestCase {
     int[] i = new int[] {42}; // an array so that clone keeps the reference
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           return null;
@@ -1111,16 +1111,6 @@ public class TestLRUQueryCache extends LuceneTestCase {
       return in.explain(context, doc);
     }
 
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return in.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      in.normalize(norm, boost);
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       scorerCalled.set(true);
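The ConstantScoreWeight pattern in these test hunks is the simplest migration path for custom queries: accept the boost in createWeight and hand it straight to the ConstantScoreWeight constructor, which then reports it from score(). A minimal sketch of a match-all-style query under that assumption (the query class and its trivial equals/hashCode are hypothetical):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    public final class MatchAllConstantQuery extends Query {

      @Override
      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
        // ConstantScoreWeight stores the boost and reports it via score().
        return new ConstantScoreWeight(this, boost) {
          @Override
          public Scorer scorer(LeafReaderContext context) throws IOException {
            return new ConstantScoreScorer(this, score(),
                DocIdSetIterator.all(context.reader().maxDoc()));
          }
        };
      }

      @Override
      public String toString(String field) {
        return "MatchAllConstantQuery";
      }

      @Override
      public boolean equals(Object other) {
        return other instanceof MatchAllConstantQuery;
      }

      @Override
      public int hashCode() {
        return getClass().hashCode();
      }
    }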
@@ -89,12 +89,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
     r = DirectoryReader.open(dir);
     reader = getOnlyLeafReader(r);
     searcher = new IndexSearcher(reader);
-    searcher.setSimilarity(new ClassicSimilarity() {
-      @Override
-      public float queryNorm(float sumOfSquaredWeights) {
-        return 1; // we disable queryNorm, both for debugging and ease of impl
-      }
-    });
+    searcher.setSimilarity(new ClassicSimilarity());
   }
 
   @AfterClass
@@ -338,11 +333,9 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
         boolean success = ords.add(ord);
         assert success; // no dups
         TermContext context = TermContext.build(reader.getContext(), term);
-        SimWeight w = weight.similarity.computeWeight(
+        SimWeight w = weight.similarity.computeWeight(1f,
             searcher.collectionStatistics("field"),
             searcher.termStatistics(term, context));
-        w.getValueForNormalization(); // ignored
-        w.normalize(1F, 1F);
         sims[(int)ord] = weight.similarity.simScorer(w, reader.getContext());
       }
     }
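The TestMinShouldMatch2 hunk above doubles as a migration recipe for anyone driving a Similarity by hand: pass the boost (1f when none applies) to computeWeight and drop the getValueForNormalization()/normalize() round-trip entirely. A sketch of the new flow in isolation, assuming a LeafReader named reader, an IndexSearcher named searcher, and an indexed term field:foo are already in scope:

    // One-step setup: the boost goes in at computeWeight time.
    Similarity similarity = new ClassicSimilarity();
    Term term = new Term("field", "foo");
    TermContext context = TermContext.build(reader.getContext(), term);
    Similarity.SimWeight w = similarity.computeWeight(1f, // boost; formerly applied via normalize()
        searcher.collectionStatistics("field"),
        searcher.termStatistics(term, context));
    Similarity.SimScorer scorer = similarity.simScorer(w, reader.getContext());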
@@ -33,7 +33,6 @@ import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.BytesRef;
@@ -336,32 +335,6 @@ public class TestMultiPhraseQuery extends LuceneTestCase {
   public void testEmptyToString() {
     new MultiPhraseQuery.Builder().build().toString();
   }
 
-  public void testCustomIDF() throws Exception {
-    Directory indexStore = newDirectory();
-    RandomIndexWriter writer = new RandomIndexWriter(random(), indexStore);
-    add("This is a test", "object", writer);
-    add("a note", "note", writer);
-
-    IndexReader reader = writer.getReader();
-    IndexSearcher searcher = newSearcher(reader);
-    searcher.setSimilarity(new ClassicSimilarity() {
-      @Override
-      public Explanation idfExplain(CollectionStatistics collectionStats, TermStatistics termStats[]) {
-        return Explanation.match(10f, "just a test");
-      }
-    });
-
-    MultiPhraseQuery.Builder queryBuilder = new MultiPhraseQuery.Builder();
-    queryBuilder.add(new Term[] { new Term("body", "this"), new Term("body", "that") });
-    queryBuilder.add(new Term("body", "is"));
-    Weight weight = queryBuilder.build().createWeight(searcher, true);
-    assertEquals(10f * 10f, weight.getValueForNormalization(), 0.001f);
-
-    writer.close();
-    reader.close();
-    indexStore.close();
-  }
-
   public void testZeroPosIncr() throws IOException {
     Directory dir = new RAMDirectory();
@@ -23,11 +23,9 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.store.Directory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -214,76 +212,6 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
     }
   }
 
-  @Test
-  public void testBoost() throws IOException {
-    // NOTE: uses index build in *this* setUp
-
-    IndexSearcher search = newSearcher(reader);
-
-    // test for correct application of query normalization
-    // must use a non score normalizing method for this.
-
-    search.setSimilarity(new ClassicSimilarity());
-    Query q = csrq("data", "1", "6", T, T);
-    search.search(new BoostQuery(q, 100), new SimpleCollector() {
-      private int base = 0;
-      private Scorer scorer;
-      @Override
-      public void setScorer(Scorer scorer) {
-        this.scorer = scorer;
-      }
-      @Override
-      public void collect(int doc) throws IOException {
-        assertEquals("score for doc " + (doc + base) + " was not correct", 1.0f, scorer.score(), SCORE_COMP_THRESH);
-      }
-      @Override
-      protected void doSetNextReader(LeafReaderContext context) throws IOException {
-        base = context.docBase;
-      }
-
-      @Override
-      public boolean needsScores() {
-        return true;
-      }
-    });
-
-    //
-    // Ensure that boosting works to score one clause of a query higher
-    // than another.
-    //
-    Query q1 = new BoostQuery(csrq("data", "A", "A", T, T), .1f); // matches document #0
-    Query q2 = csrq("data", "Z", "Z", T, T); // matches document #1
-    BooleanQuery.Builder bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    ScoreDoc[] hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(1, hits[0].doc);
-    Assert.assertEquals(0, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-
-    q1 = new BoostQuery(csrq("data", "A", "A", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE), .1f); // matches document #0
-    q2 = csrq("data", "Z", "Z", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE); // matches document #1
-    bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(1, hits[0].doc);
-    Assert.assertEquals(0, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-
-    q1 = new BoostQuery(csrq("data", "A", "A", T, T), 10f); // matches document #0
-    q2 = csrq("data", "Z", "Z", T, T); // matches document #1
-    bq = new BooleanQuery.Builder();
-    bq.add(q1, BooleanClause.Occur.SHOULD);
-    bq.add(q2, BooleanClause.Occur.SHOULD);
-
-    hits = search.search(bq.build(), 1000).scoreDocs;
-    Assert.assertEquals(0, hits[0].doc);
-    Assert.assertEquals(1, hits[1].doc);
-    assertTrue(hits[0].score > hits[1].score);
-  }
-
   @Test
   public void testBooleanOrderUnAffected() throws IOException {
@@ -101,8 +101,8 @@ public class TestNeedsScores extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      final Weight w = in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      final Weight w = in.createWeight(searcher, needsScores, boost);
       return new Weight(AssertNeedsScores.this) {
         @Override
         public void extractTerms(Set<Term> terms) {
@@ -114,16 +114,6 @@ public class TestNeedsScores extends LuceneTestCase {
           return w.explain(context, doc);
         }
 
-        @Override
-        public float getValueForNormalization() throws IOException {
-          return w.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-          w.normalize(norm, topLevelBoost);
-        }
-
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           assertEquals("query=" + in, value, needsScores);
@@ -252,7 +252,7 @@ public class TestPositionIncrement extends LuceneTestCase {
       System.out.println("\ngetPayloadSpans test");
     }
     PayloadSpanCollector collector = new PayloadSpanCollector();
-    Spans pspans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
+    Spans pspans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.PAYLOADS);
     while (pspans.nextDoc() != Spans.NO_MORE_DOCS) {
       while (pspans.nextStartPosition() != Spans.NO_MORE_POSITIONS) {
         if (VERBOSE) {
@@ -274,7 +274,7 @@ public class TestPositionIncrement extends LuceneTestCase {
     assertEquals(8, count);
 
     // System.out.println("\ngetSpans test");
-    Spans spans = snq.createWeight(is, false).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = snq.createWeight(is, false, 1f).getSpans(is.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     count = 0;
     sawZero = false;
     while (spans.nextDoc() != Spans.NO_MORE_DOCS) {
@@ -94,7 +94,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
     IndexReader ir = writer.getReader();
     writer.close();
     IndexSearcher searcher = newSearcher(ir);
-    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
+    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true, 1f);
     Scorer s = new SimpleScorer(fake);
     TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length);
     Collector c = new PositiveScoresOnlyCollector(tdc);
@@ -412,7 +412,7 @@ public class TestQueryRescorer extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
 
       return new Weight(FixedScoreQuery.this) {
 
@@ -420,15 +420,6 @@ public class TestQueryRescorer extends LuceneTestCase {
         public void extractTerms(Set<Term> terms) {
         }
 
-        @Override
-        public float getValueForNormalization() {
-          return 1.0f;
-        }
-
-        @Override
-        public void normalize(float queryNorm, float topLevelBoost) {
-        }
-
         @Override
         public Scorer scorer(final LeafReaderContext context) throws IOException {
 
@@ -116,7 +116,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
     IndexReader ir = writer.getReader();
     writer.close();
     IndexSearcher searcher = newSearcher(ir);
-    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true);
+    Weight fake = new TermQuery(new Term("fake", "weight")).createWeight(searcher, true, 1f);
     Scorer s = new SimpleScorer(fake);
     ScoreCachingCollector scc = new ScoreCachingCollector(scores.length);
     scc.setScorer(s);
@@ -149,8 +149,8 @@ public class TestScorerPerf extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
          return new ConstantScoreScorer(this, score(), new BitSetIterator(docs, docs.approximateCardinality()));
@@ -39,8 +39,6 @@ import org.apache.lucene.document.Document;
 public class TestSimilarity extends LuceneTestCase {
 
   public static class SimpleSimilarity extends ClassicSimilarity {
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) { return 1.0f; }
     @Override public float lengthNorm(FieldInvertState state) { return state.getBoost(); }
     @Override public float tf(float freq) { return freq; }
     @Override public float sloppyFreq(int distance) { return 2.0f; }
@@ -114,11 +114,6 @@ public class TestSimilarityProvider extends LuceneTestCase {
       return norm;
     }
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1f;
-    }
-
     @Override
     public float lengthNorm(FieldInvertState state) {
       return 1f;
@@ -156,11 +151,6 @@ public class TestSimilarityProvider extends LuceneTestCase {
     public float decodeNormValue(long norm) {
       return norm;
     }
 
-    @Override
-    public float queryNorm(float sumOfSquaredWeights) {
-      return 1f;
-    }
-
     @Override
     public float lengthNorm(FieldInvertState state) {
@@ -229,8 +229,8 @@ public class TestSortRandom extends LuceneTestCase {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      return new ConstantScoreWeight(this) {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      return new ConstantScoreWeight(this, boost) {
        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
          Random random = new Random(context.docBase ^ seed);
@@ -172,7 +172,7 @@ public class TestSimilarityBase extends LuceneTestCase {
 
   /** Creates the default statistics object that the specific tests modify. */
   private BasicStats createStats() {
-    BasicStats stats = new BasicStats("spoof");
+    BasicStats stats = new BasicStats("spoof", 1f);
     stats.setNumberOfDocuments(NUMBER_OF_DOCUMENTS);
     stats.setNumberOfFieldTokens(NUMBER_OF_FIELD_TOKENS);
     stats.setAvgFieldLength(AVG_FIELD_LENGTH);
@@ -197,9 +197,9 @@ public class TestSimilarityBase extends LuceneTestCase {
   private void unitTestCore(BasicStats stats, float freq, int docLen) {
     for (SimilarityBase sim : sims) {
       BasicStats realStats = (BasicStats) sim.computeWeight(
+          stats.getBoost(),
           toCollectionStats(stats),
           toTermStats(stats));
-      realStats.normalize(1f, stats.getBoost());
       float score = sim.score(realStats, freq, docLen);
       float explScore = sim.explain(
           realStats, 1, Explanation.match(freq, "freq"), docLen).getValue();
@@ -530,9 +530,9 @@ public class TestSimilarityBase extends LuceneTestCase {
   private void correctnessTestCore(SimilarityBase sim, float gold) {
     BasicStats stats = createStats();
     BasicStats realStats = (BasicStats) sim.computeWeight(
+        stats.getBoost(),
         toCollectionStats(stats),
         toTermStats(stats));
-    realStats.normalize(1f, stats.getBoost());
     float score = sim.score(realStats, FREQ, DOC_LEN);
     assertEquals(
         sim.toString() + " score not correct.", gold, score, FLOAT_EPSILON);
@@ -648,7 +648,7 @@ public class TestSimilarityBase extends LuceneTestCase {
           continue;
         }
       }
-      BasicStats stats = (BasicStats) sim.computeWeight(collectionStats, termStats);
+      BasicStats stats = (BasicStats) sim.computeWeight(1f, collectionStats, termStats);
       for (float tf = 1.0f; tf <= 10.0f; tf += 1.0f) {
         for (int i = 0; i < 256; i++) {
           float len = sim.decodeNormValue((byte) i);
@@ -93,7 +93,7 @@ final class JustCompileSearchSpans {
   }
 
   @Override
-  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     throw new UnsupportedOperationException(UNSUPPORTED_MSG);
   }
 
@@ -142,7 +142,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkEqual(q, qr);
 
     Set<Term> terms = new HashSet<>();
-    qr.createWeight(searcher, false).extractTerms(terms);
+    qr.createWeight(searcher, false, 1f).extractTerms(terms);
     assertEquals(1, terms.size());
   }
 
@@ -162,7 +162,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkUnequal(q, qr);
 
     Set<Term> terms = new HashSet<>();
-    qr.createWeight(searcher, false).extractTerms(terms);
+    qr.createWeight(searcher, false, 1f).extractTerms(terms);
     assertEquals(2, terms.size());
   }
 
@@ -176,7 +176,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     QueryUtils.checkEqual(q, qr);
 
     HashSet<Term> set = new HashSet<>();
-    qr.createWeight(searcher, true).extractTerms(set);
+    qr.createWeight(searcher, true, 1f).extractTerms(set);
     assertEquals(2, set.size());
   }
 
@@ -252,7 +252,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     SpanQuery q = new SpanOrQuery(q1, new FieldMaskingSpanQuery(q2, "gender"));
     check(q, new int[] { 0, 1, 2, 3, 4 });
 
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,0,1);
     assertNext(span, 1,1,2);
@@ -274,8 +274,8 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
     check(qA, new int[] { 0, 1, 2, 4 });
     check(qB, new int[] { 0, 1, 2, 4 });
 
-    Spans spanA = qA.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
-    Spans spanB = qB.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spanA = qA.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spanB = qB.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
 
     while (spanA.nextDoc() != Spans.NO_MORE_DOCS) {
       assertNotSame("spanB not still going", Spans.NO_MORE_DOCS, spanB.nextDoc());
@@ -300,7 +300,7 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
         new FieldMaskingSpanQuery(qB, "id") }, -1, false );
     check(q, new int[] { 0, 1, 2, 3 });
 
-    Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(span, 0,0,1);
     assertNext(span, 1,1,2);
     assertNext(span, 2,0,1);
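These span tests all migrate with the same idiom: callers that only need positions pass needsScores=false and now an explicit 1f boost. A sketch of the pattern in isolation, assuming an IndexSearcher named searcher over a single-segment index and some SpanQuery q:

    // Enumerate spans without scoring: the 1f boost is required by the new
    // signature but has no effect when needsScores is false.
    Spans spans = q.createWeight(searcher, false, 1f)
        .getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
    while (spans.nextDoc() != Spans.NO_MORE_DOCS) {
      while (spans.nextStartPosition() != Spans.NO_MORE_POSITIONS) {
        System.out.println(spans.docID() + ": [" + spans.startPosition() + ", " + spans.endPosition() + ")");
      }
    }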
@ -121,7 +121,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
|
||||
public void testNearSpansNext() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertNext(span,0,0,3);
|
||||
assertNext(span,1,0,4);
|
||||
assertFinished(span);
|
||||
|
@ -134,7 +134,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
*/
|
||||
public void testNearSpansAdvanceLikeNext() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertEquals(0, span.advance(0));
|
||||
assertEquals(0, span.nextStartPosition());
|
||||
assertEquals(s(0,0,3), s(span));
|
||||
|
@ -146,7 +146,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
|
||||
public void testNearSpansNextThenAdvance() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
|
||||
assertEquals(0, span.nextStartPosition());
|
||||
assertEquals(s(0,0,3), s(span));
|
||||
|
@ -158,7 +158,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
|
||||
public void testNearSpansNextThenAdvancePast() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertNotSame(Spans.NO_MORE_DOCS, span.nextDoc());
|
||||
assertEquals(0, span.nextStartPosition());
|
||||
assertEquals(s(0,0,3), s(span));
|
||||
|
@ -167,13 +167,13 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
|
||||
public void testNearSpansAdvancePast() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertEquals(Spans.NO_MORE_DOCS, span.advance(2));
|
||||
}
|
||||
|
||||
public void testNearSpansAdvanceTo0() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertEquals(0, span.advance(0));
|
||||
assertEquals(0, span.nextStartPosition());
|
||||
assertEquals(s(0,0,3), s(span));
|
||||
|
@ -181,7 +181,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
|
|||
|
||||
public void testNearSpansAdvanceTo1() throws Exception {
|
||||
SpanNearQuery q = makeQuery();
|
||||
Spans span = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
Spans span = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
|
||||
assertEquals(1, span.advance(1));
|
     assertEquals(0, span.nextStartPosition());
     assertEquals(s(1,0,4), s(span));

@@ -220,7 +220,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         new SpanOrQuery(new SpanTermQuery(new Term(FIELD, "w1")), new SpanTermQuery(new Term(FIELD, "w2"))),
         new SpanTermQuery(new Term(FIELD, "w4"))
     }, 10, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,0,0,4);
     assertNext(spans,0,1,4);
     assertFinished(spans);

@@ -230,7 +230,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t1")), new SpanTermQuery(new Term(FIELD, "t2"))
     }, 1, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,0,2);
     assertFinished(spans);
   }

@@ -239,7 +239,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
     SpanNearQuery q = new SpanNearQuery(new SpanQuery[]{
         new SpanTermQuery(new Term(FIELD, "t2")), new SpanTermQuery(new Term(FIELD, "t1"))
     }, 1, true);
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans,4,1,4);
     assertNext(spans,4,2,4);
     assertFinished(spans);

@@ -263,7 +263,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(1)
         .addClause(new SpanTermQuery(new Term(FIELD, "w2")))
         .build();
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 1, 0, 3);
     assertNext(spans, 2, 0, 3);
     assertFinished(spans);

@@ -276,7 +276,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addClause(new SpanTermQuery(new Term(FIELD, "w3")))
         .setSlop(1)
         .build();
-    spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 2, 0, 5);
     assertNext(spans, 3, 0, 6);
     assertFinished(spans);

@@ -288,7 +288,7 @@ public class TestNearSpansOrdered extends LuceneTestCase {
         .addGap(2)
         .addClause(new SpanTermQuery(new Term(FIELD, "g")))
         .build();
-    Spans spans = q.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 5, 0, 4);
     assertNext(spans, 5, 9, 13);
     assertFinished(spans);
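Every hunk above is the same mechanical rewrite: `createWeight(searcher, needsScores)` grows a third, explicit boost argument, and the tests pass a neutral `1f` because they only inspect span positions, never scores. A hedged sketch of the new call shape, assuming an `IndexSearcher searcher` and a `SpanQuery q` like the ones built above:

    // Sketch (not itself part of the patch): obtaining Spans after LUCENE-7368.
    // The boost is now fixed at Weight-construction time; 1f means "no boost".
    SpanWeight weight = q.createWeight(searcher, false, 1f);
    Spans spans = weight.getSpans(searcher.getIndexReader().leaves().get(0),
                                  SpanWeight.Postings.POSITIONS);
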
@@ -119,7 +119,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNearQuery q7 = new SpanNearQuery(new SpanQuery[]{q1, q6}, 1, true);

     TermCollector collector = new TermCollector();
-    Spans spans = q7.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = q7.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertEquals(0, spans.advance(0));
     spans.nextStartPosition();
     checkCollectedTerms(spans, collector, new Term(FIELD, "w1"), new Term(FIELD, "w2"), new Term(FIELD, "w3"));

@@ -139,7 +139,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanOrQuery orQuery = new SpanOrQuery(q2, q3);

     TermCollector collector = new TermCollector();
-    Spans spans = orQuery.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = orQuery.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);

     assertEquals(1, spans.advance(1));
     spans.nextStartPosition();

@@ -169,7 +169,7 @@ public class TestSpanCollection extends LuceneTestCase {
     SpanNotQuery notq = new SpanNotQuery(nq, q3);

     TermCollector collector = new TermCollector();
-    Spans spans = notq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = notq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);

     assertEquals(2, spans.advance(2));
     spans.nextStartPosition();
@@ -72,7 +72,7 @@ public class TestSpanContainQuery extends LuceneTestCase {
   }

   Spans makeSpans(SpanQuery sq) throws Exception {
-    return sq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    return sq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }

   void tstEqualSpans(String mes, SpanQuery expectedQ, SpanQuery actualQ) throws Exception {
@@ -202,7 +202,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearOrderedOverlap() throws Exception {
     final SpanQuery query = spanNearOrderedQuery(field, 1, "t1", "t2", "t3");

-    Spans spans = query.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = query.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);

     assertEquals("first doc", 11, spans.nextDoc());
     assertEquals("first start", 0, spans.nextStartPosition());

@@ -217,7 +217,7 @@ public class TestSpans extends LuceneTestCase {
   public void testSpanNearUnOrdered() throws Exception {
     //See http://www.gossamer-threads.com/lists/lucene/java-dev/52270 for discussion about this test
     SpanQuery senq = spanNearUnorderedQuery(field, 0, "u1", "u2");
-    Spans spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = senq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 1, 3);
     assertNext(spans, 5, 2, 4);
     assertNext(spans, 8, 2, 4);

@@ -226,7 +226,7 @@ public class TestSpans extends LuceneTestCase {
     assertFinished(spans);

     senq = spanNearUnorderedQuery(1, senq, spanTermQuery(field, "u2"));
-    spans = senq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    spans = senq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
     assertNext(spans, 4, 0, 3);
     assertNext(spans, 4, 1, 3); // unordered spans can be subsets
     assertNext(spans, 5, 0, 4);

@@ -240,7 +240,7 @@ public class TestSpans extends LuceneTestCase {
   }

   private Spans orSpans(String[] terms) throws Exception {
-    return spanOrQuery(field, terms).createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    return spanOrQuery(field, terms).createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
   }

   public void testSpanOrEmpty() throws Exception {

@@ -444,7 +444,7 @@ public class TestSpans extends LuceneTestCase {
     SpanQuery iq = spanTermQuery(field, include);
     SpanQuery eq = spanTermQuery(field, exclude);
     SpanQuery snq = spanNotQuery(iq, eq, pre, post);
-    Spans spans = snq.createWeight(searcher, false).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);
+    Spans spans = snq.createWeight(searcher, false, 1f).getSpans(searcher.getIndexReader().leaves().get(0), SpanWeight.Postings.POSITIONS);

     int i = 0;
     if (spans != null) {
@@ -79,8 +79,8 @@ class DrillSidewaysQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight baseWeight = baseQuery.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    final Weight baseWeight = baseQuery.createWeight(searcher, needsScores, boost);
     final Weight[] drillDowns = new Weight[drillDownQueries.length];
     for(int dim=0;dim<drillDownQueries.length;dim++) {
       drillDowns[dim] = searcher.createNormalizedWeight(drillDownQueries[dim], false);

@@ -95,16 +95,6 @@ class DrillSidewaysQuery extends Query {
         return baseWeight.explain(context, doc);
       }

-      @Override
-      public float getValueForNormalization() throws IOException {
-        return baseWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        baseWeight.normalize(norm, boost);
-      }
-
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        // We can only run as a top scorer:
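DrillSidewaysQuery sets the pattern that most of the remaining wrappers follow: forward the incoming boost to the wrapped query's `createWeight`, and delete the `getValueForNormalization()`/`normalize()` overrides whose only job was to relay the old normalization pass. In a hedged sketch (the `wrapped` field is illustrative, not from the patch), a delegating query's weight creation reduces to:

    @Override
    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
      // Boost is fixed at construction time; no second normalize() pass exists anymore.
      return wrapped.createWeight(searcher, needsScores, boost);
    }
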
@@ -137,12 +137,12 @@ public final class DoubleRange extends Range {
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       final Weight fastMatchWeight = fastMatchQuery == null
           ? null
-          : searcher.createWeight(fastMatchQuery, false);
+          : searcher.createWeight(fastMatchQuery, false, 1f);

-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           final int maxDoc = context.reader().maxDoc();
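The two facet range classes show the constant-score flavor of the migration: the boost moves into `ConstantScoreWeight`'s constructor, and the auxiliary fast-match weight is created with a neutral `1f` since it only filters and never scores (the LongRange hunk below is identical in shape). Restated as a hedged fragment with comments:

    final Weight fastMatchWeight = fastMatchQuery == null
        ? null
        : searcher.createWeight(fastMatchQuery, false, 1f); // match-only, so boost stays neutral
    return new ConstantScoreWeight(this, boost) {           // boost now lives in the weight itself
      // scorer() as in the hunk above
    };
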
@@ -129,12 +129,12 @@ public final class LongRange extends Range {
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       final Weight fastMatchWeight = fastMatchQuery == null
           ? null
-          : searcher.createWeight(fastMatchQuery, false);
+          : searcher.createWeight(fastMatchQuery, false, 1f);

-      return new ConstantScoreWeight(this) {
+      return new ConstantScoreWeight(this, boost) {
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           final int maxDoc = context.reader().maxDoc();
@@ -650,8 +650,8 @@ public class TestDrillSideways extends FacetTestCase {
       filter = new Query() {

         @Override
-        public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-          return new RandomAccessWeight(this) {
+        public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+          return new RandomAccessWeight(this, boost) {
             @Override
             protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException {
               return new Bits() {
@@ -678,8 +678,8 @@ public class TestRangeFacetCounts extends FacetTestCase {
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-      final Weight in = this.in.createWeight(searcher, needsScores);
+    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+      final Weight in = this.in.createWeight(searcher, needsScores, boost);
       return new Weight(in.getQuery()) {

         @Override

@@ -692,16 +692,6 @@ public class TestRangeFacetCounts extends FacetTestCase {
           return in.explain(context, doc);
         }

-        @Override
-        public float getValueForNormalization() throws IOException {
-          return in.getValueForNormalization();
-        }
-
-        @Override
-        public void normalize(float norm, float topLevelBoost) {
-          in.normalize(norm, topLevelBoost);
-        }
-
         @Override
         public Scorer scorer(LeafReaderContext context) throws IOException {
           used.set(true);
@@ -288,10 +288,10 @@ public class WeightedSpanTermExtractor {
       for (final String field : fieldNames) {
         final SpanQuery rewrittenQuery = (SpanQuery) spanQuery.rewrite(getLeafContext().reader());
         queries.put(field, rewrittenQuery);
-        rewrittenQuery.createWeight(searcher, false).extractTerms(nonWeightedTerms);
+        rewrittenQuery.createWeight(searcher, false, boost).extractTerms(nonWeightedTerms);
       }
     } else {
-      spanQuery.createWeight(searcher, false).extractTerms(nonWeightedTerms);
+      spanQuery.createWeight(searcher, false, boost).extractTerms(nonWeightedTerms);
     }

     List<PositionSpan> spanPositions = new ArrayList<>();
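The highlighter never scores these weights at all; it builds them solely to call `extractTerms`, so the boost value it threads through is inconsequential for the extraction itself. A hedged fragment of the idiom (assuming a `searcher` and a rewritten `query` are in scope):

    // Sketch: a throw-away Weight used purely to harvest matching terms.
    Set<Term> terms = new HashSet<>();
    query.createWeight(searcher, false, 1f).extractTerms(terms); // boost is irrelevant here
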
@@ -60,8 +60,8 @@ final class GlobalOrdinalsQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new W(this, toQuery.createWeight(searcher, false));
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new W(this, toQuery.createWeight(searcher, false, 1f), boost);
   }

   @Override

@@ -98,8 +98,8 @@ final class GlobalOrdinalsQuery extends Query {

     private final Weight approximationWeight;

-    W(Query query, Weight approximationWeight) {
-      super(query);
+    W(Query query, Weight approximationWeight, float boost) {
+      super(query, boost);
       this.approximationWeight = approximationWeight;
     }
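The join queries split boost handling in two: the weight built from `toQuery` only drives matching, so it is created with a neutral `1f`, while the scoring weight `W` receives the real boost through its superclass constructor. Note that `GlobalOrdinalsWithScoreQuery` below drops the boost entirely; its scores come from the pre-computed join values, as the removed `normalize()` comment already stated. A hedged restatement of the two-track idea:

    // Sketch: the approximation weight never scores, so it gets a neutral boost;
    // the scoring weight receives the caller's boost at construction time.
    Weight approximation = toQuery.createWeight(searcher, false, 1f); // match-only
    return new W(this, approximation, boost);                         // scoring side keeps the boost
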
@@ -61,8 +61,8 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new W(this, toQuery.createWeight(searcher, false));
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new W(this, toQuery.createWeight(searcher, false, 1f));
   }

   @Override

@@ -141,17 +141,6 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
       return Explanation.match(score, "A match, join value " + Term.toString(joinValue));
     }

-    @Override
-    public float getValueForNormalization() throws IOException {
-      return 1f;
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      // no normalization, we ignore the normalization process
-      // and produce scores based on the join
-    }
-
    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      SortedDocValues values = DocValues.getSorted(context.reader(), joinField);
@@ -118,8 +118,7 @@ abstract class PointInSetIncludingScoreQuery extends Query {
   }

   @Override
-  public final Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
+  public final Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(this) {

       @Override

@@ -138,16 +137,6 @@ abstract class PointInSetIncludingScoreQuery extends Query {
         return Explanation.noMatch("Not a match");
       }

-      @Override
-      public float getValueForNormalization() throws IOException {
-        return originalWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        originalWeight.normalize(norm, boost);
-      }
-
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        LeafReader reader = context.reader();
@@ -106,8 +106,7 @@ class TermsIncludingScoreQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    final Weight originalWeight = originalQuery.createWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new Weight(TermsIncludingScoreQuery.this) {

       @Override

@@ -133,16 +132,6 @@ class TermsIncludingScoreQuery extends Query {
        return Explanation.noMatch("Not a match");
      }

-      @Override
-      public float getValueForNormalization() throws IOException {
-        return originalWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        originalWeight.normalize(norm, boost);
-      }
-
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        Terms terms = context.reader().terms(field);
@@ -81,8 +81,8 @@ public class ToChildBlockJoinQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores), parentsFilter, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ToChildBlockJoinWeight(this, parentQuery.createWeight(searcher, needsScores, boost), parentsFilter, needsScores);
   }

   /** Return our parent query. */

@@ -107,16 +107,6 @@ public class ToChildBlockJoinQuery extends Query {
       parentWeight.extractTerms(terms);
     }

-    @Override
-    public float getValueForNormalization() throws IOException {
-      return parentWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      parentWeight.normalize(norm, boost);
-    }
-
    // NOTE: acceptDocs applies (and is checked) only in the
    // child document space
    @Override
@@ -115,8 +115,8 @@ public class ToParentBlockJoinQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores), parentsFilter, needsScores ? scoreMode : ScoreMode.None);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new BlockJoinWeight(this, childQuery.createWeight(searcher, needsScores, boost), parentsFilter, needsScores ? scoreMode : ScoreMode.None);
   }

   /** Return our child query. */

@@ -141,16 +141,6 @@ public class ToParentBlockJoinQuery extends Query {
       childWeight.extractTerms(terms);
     }

-    @Override
-    public float getValueForNormalization() throws IOException {
-      return childWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      childWeight.normalize(norm, boost);
-    }
-
    // NOTE: acceptDocs applies (and is checked) only in the
    // parent document space
    @Override
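The two block-join queries need no boost handling of their own: the boost is baked into the wrapped parent or child weight, and the ten lines of normalization forwarding disappear. Restated as a hedged fragment (from the ToParent hunk above, with comments added):

    // Sketch: the child weight is built with the caller's boost, so BlockJoinWeight
    // simply aggregates already-boosted child scores per parent block.
    Weight childWeight = childQuery.createWeight(searcher, needsScores, boost);
    return new BlockJoinWeight(this, childWeight, parentsFilter,
        needsScores ? scoreMode : ScoreMode.None);
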
@@ -118,13 +118,13 @@ public class TestBlockJoin extends LuceneTestCase {
     IndexReader indexReader = DirectoryReader.open(directory);
     IndexSearcher indexSearcher = new IndexSearcher(indexReader);

-    Weight weight = toParentBlockJoinQuery.createWeight(indexSearcher, false);
+    Weight weight = toParentBlockJoinQuery.createWeight(indexSearcher, false, 1f);
     Set<Term> terms = new HashSet<>();
     weight.extractTerms(terms);
     Term[] termArr =terms.toArray(new Term[0]);
     assertEquals(1, termArr.length);

-    weight = toChildBlockJoinQuery.createWeight(indexSearcher, false);
+    weight = toChildBlockJoinQuery.createWeight(indexSearcher, false, 1f);
     terms = new HashSet<>();
     weight.extractTerms(terms);
     termArr =terms.toArray(new Term[0]);
@@ -464,8 +464,8 @@ public class TestJoinUtil extends LuceneTestCase {
       private final Query fieldQuery = new FieldValueQuery(priceField);

       @Override
-      public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-        Weight fieldWeight = fieldQuery.createWeight(searcher, false);
+      public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+        Weight fieldWeight = fieldQuery.createWeight(searcher, false, boost);
         return new Weight(this) {

           @Override

@@ -477,15 +477,6 @@ public class TestJoinUtil extends LuceneTestCase {
             return null;
           }

-          @Override
-          public float getValueForNormalization() throws IOException {
-            return 0;
-          }
-
-          @Override
-          public void normalize(float norm, float topLevelBoost) {
-          }
-
          @Override
          public Scorer scorer(LeafReaderContext context) throws IOException {
            Scorer fieldScorer = fieldWeight.scorer(context);
@@ -419,9 +419,9 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
     }

     @Override
-    public SimWeight computeWeight(
+    public SimWeight computeWeight(float boost,
         CollectionStatistics collectionStats, TermStatistics... termStats) {
-      return sim.computeWeight(collectionStats, termStats);
+      return sim.computeWeight(boost, collectionStats, termStats);
     }

     @Override
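Similarity is the other side of the same API move: `computeWeight` now receives the boost up front as its first parameter, instead of learning about boosts later through the removed normalization pass. For a delegating Similarity like this test's, the override becomes a pure pass-through; a minimal hedged sketch, assuming a wrapped `Similarity sim`:

    // Sketch of a pass-through after the signature change: forward the boost verbatim.
    @Override
    public SimWeight computeWeight(float boost,
        CollectionStatistics collectionStats, TermStatistics... termStats) {
      return sim.computeWeight(boost, collectionStats, termStats);
    }
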
@@ -63,12 +63,12 @@ public class BoostingQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     if (needsScores == false) {
-      return match.createWeight(searcher, needsScores);
+      return match.createWeight(searcher, needsScores, boost);
     }
-    final Weight matchWeight = searcher.createWeight(match, needsScores);
-    final Weight contextWeight = searcher.createWeight(context, false);
+    final Weight matchWeight = searcher.createWeight(match, needsScores, boost);
+    final Weight contextWeight = searcher.createWeight(context, false, boost);
     return new Weight(this) {

       @Override

@@ -91,16 +91,6 @@ public class BoostingQuery extends Query {
             Explanation.match(boost, "boost"));
       }

-      @Override
-      public float getValueForNormalization() throws IOException {
-        return matchWeight.getValueForNormalization();
-      }
-
-      @Override
-      public void normalize(float norm, float boost) {
-        matchWeight.normalize(norm, boost);
-      }
-
      @Override
      public Scorer scorer(LeafReaderContext context) throws IOException {
        final Scorer matchScorer = matchWeight.scorer(context);
@@ -26,7 +26,6 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.function.FunctionQuery;
-import org.apache.lucene.queries.function.ValueSource;
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.FilterScorer;
 import org.apache.lucene.search.IndexSearcher;

@@ -48,7 +47,6 @@ public class CustomScoreQuery extends Query implements Cloneable {

   private Query subQuery;
   private Query[] scoringQueries; // never null (empty array if there are no valSrcQueries).
-  private boolean strict = false; // if true, valueSource part of query does not take part in weights normalization.

   /**
    * Create a CustomScoreQuery over input subQuery.

@@ -131,7 +129,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
       sb.append(", ").append(scoringQuery.toString(field));
     }
     sb.append(")");
-    sb.append(strict?" STRICT" : "");
     return sb.toString();
   }

@@ -144,7 +141,6 @@ public class CustomScoreQuery extends Query implements Cloneable {

   private boolean equalsTo(CustomScoreQuery other) {
     return subQuery.equals(other.subQuery) &&
-           strict == other.strict &&
           scoringQueries.length == other.scoringQueries.length &&
           Arrays.equals(scoringQueries, other.scoringQueries);
   }

@@ -155,7 +151,7 @@ public class CustomScoreQuery extends Query implements Cloneable {
     // Didn't change this hashcode, but it looks suspicious.
     return (classHash() +
         subQuery.hashCode() +
-        Arrays.hashCode(scoringQueries)) ^ (strict ? 1234 : 4321);
+        Arrays.hashCode(scoringQueries));
   }

   /**

@@ -171,19 +167,23 @@ public class CustomScoreQuery extends Query implements Cloneable {
   //=========================== W E I G H T ============================

   private class CustomWeight extends Weight {
-    Weight subQueryWeight;
-    Weight[] valSrcWeights;
-    boolean qStrict;
-    float queryWeight;
+    final Weight subQueryWeight;
+    final Weight[] valSrcWeights;
+    final float queryWeight;

-    public CustomWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public CustomWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(CustomScoreQuery.this);
-      this.subQueryWeight = subQuery.createWeight(searcher, needsScores);
+      // note we DONT incorporate our boost, nor pass down any boost
+      // (e.g. from outer BQ), as there is no guarantee that the CustomScoreProvider's
+      // function obeys the distributive law... it might call sqrt() on the subQuery score
+      // or some other arbitrary function other than multiplication.
+      // so, instead boosts are applied directly in score()
+      this.subQueryWeight = subQuery.createWeight(searcher, needsScores, 1f);
       this.valSrcWeights = new Weight[scoringQueries.length];
       for(int i = 0; i < scoringQueries.length; i++) {
-        this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores);
+        this.valSrcWeights[i] = scoringQueries[i].createWeight(searcher, needsScores, 1f);
       }
-      this.qStrict = strict;
+      this.queryWeight = boost;
     }

     @Override

@@ -194,36 +194,6 @@ public class CustomScoreQuery extends Query implements Cloneable {
       }
     }

-    @Override
-    public float getValueForNormalization() throws IOException {
-      float sum = subQueryWeight.getValueForNormalization();
-      for (Weight valSrcWeight : valSrcWeights) {
-        if (qStrict == false) { // otherwise do not include ValueSource part in the query normalization
-          sum += valSrcWeight.getValueForNormalization();
-        }
-      }
-      return sum;
-    }
-
-    /*(non-Javadoc) @see org.apache.lucene.search.Weight#normalize(float) */
-    @Override
-    public void normalize(float norm, float boost) {
-      // note we DONT incorporate our boost, nor pass down any boost
-      // (e.g. from outer BQ), as there is no guarantee that the CustomScoreProvider's
-      // function obeys the distributive law... it might call sqrt() on the subQuery score
-      // or some other arbitrary function other than multiplication.
-      // so, instead boosts are applied directly in score()
-      subQueryWeight.normalize(norm, 1f);
-      for (Weight valSrcWeight : valSrcWeights) {
-        if (qStrict) {
-          valSrcWeight.normalize(1, 1); // do not normalize the ValueSource part
-        } else {
-          valSrcWeight.normalize(norm, 1f);
-        }
-      }
-      queryWeight = boost;
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
       Scorer subQueryScorer = subQueryWeight.scorer(context);

@@ -311,30 +281,8 @@ public class CustomScoreQuery extends Query implements Cloneable {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new CustomWeight(searcher, needsScores);
-  }
-
-  /**
-   * Checks if this is strict custom scoring.
-   * In strict custom scoring, the {@link ValueSource} part does not participate in weight normalization.
-   * This may be useful when one wants full control over how scores are modified, and does
-   * not care about normalizing by the {@link ValueSource} part.
-   * One particular case where this is useful if for testing this query.
-   * <P>
-   * Note: only has effect when the {@link ValueSource} part is not null.
-   */
-  public boolean isStrict() {
-    return strict;
-  }
-
-  /**
-   * Set the strict mode of this query.
-   * @param strict The strict mode to set.
-   * @see #isStrict()
-   */
-  public void setStrict(boolean strict) {
-    this.strict = strict;
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new CustomWeight(searcher, needsScores, boost);
   }

   /** The sub-query that CustomScoreQuery wraps, affecting both the score and which documents match. */
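CustomScoreQuery is the one place the patch deliberately does not push the boost down. Because the custom score function need not be linear (the comment gives sqrt() as an example), pre-boosting the sub-scores would change the final result; the sub-weights therefore get a neutral `1f` and the boost is stashed in `queryWeight` and multiplied in once at scoring time. A hedged sketch of the resulting flow (`customScore` stands in for the CustomScoreProvider call):

    // Sketch: boost applied once, after the arbitrary combination function.
    Weight subWeight = subQuery.createWeight(searcher, needsScores, 1f); // neutral on purpose
    float queryWeight = boost;                                           // remembered for score()
    // later, per document (illustrative, not the verbatim scorer code):
    // float score = queryWeight * customScore(subQueryScore, valSrcScores);
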
@@ -254,8 +254,8 @@ public class TermsQuery extends Query implements Accountable {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new ConstantScoreWeight(this) {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new ConstantScoreWeight(this, boost) {

       @Override
       public void extractTerms(Set<Term> terms) {

@@ -334,8 +334,7 @@ public class TermsQuery extends Query implements Accountable {
           bq.add(new TermQuery(new Term(t.field, t.term), termContext), Occur.SHOULD);
         }
         Query q = new ConstantScoreQuery(bq.build());
-        final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
-        weight.normalize(1f, score());
+        final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores, score());
         return new WeightOrDocIdSet(weight);
       } else {
         assert builder != null;
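This TermsQuery hunk is the clearest one-for-one translation of the old idiom: code that used to create a weight and then push a constant score into it via `normalize(1f, score())` now passes that score directly as the boost argument. Side by side, with comments:

    // Before: two steps, the constant score injected via the normalization pass.
    //   final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
    //   weight.normalize(1f, score());
    // After: one step, the constant score rides in as the boost.
    final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores, score());
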
@@ -59,17 +59,17 @@ public final class BoostedQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new BoostedQuery.BoostedWeight(searcher, needsScores);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new BoostedQuery.BoostedWeight(searcher, needsScores, boost);
   }

   private class BoostedWeight extends Weight {
     Weight qWeight;
     Map fcontext;

-    public BoostedWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+    public BoostedWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
       super(BoostedQuery.this);
-      this.qWeight = searcher.createWeight(q, needsScores);
+      this.qWeight = searcher.createWeight(q, needsScores, boost);
       this.fcontext = ValueSource.newContext(searcher);
       boostVal.createWeight(fcontext,searcher);
     }

@@ -79,16 +79,6 @@ public final class BoostedQuery extends Query {
       qWeight.extractTerms(terms);
     }

-    @Override
-    public float getValueForNormalization() throws IOException {
-      return qWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      qWeight.normalize(norm, boost);
-    }
-
    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      Scorer subQueryScorer = qWeight.scorer(context);
@@ -55,35 +55,23 @@ public class FunctionQuery extends Query {

   protected class FunctionWeight extends Weight {
     protected final IndexSearcher searcher;
-    protected float queryNorm, boost, queryWeight;
+    protected final float boost;
     protected final Map context;

-    public FunctionWeight(IndexSearcher searcher) throws IOException {
+    public FunctionWeight(IndexSearcher searcher, float boost) throws IOException {
       super(FunctionQuery.this);
       this.searcher = searcher;
       this.context = ValueSource.newContext(searcher);
       func.createWeight(context, searcher);
-      normalize(1f, 1f);;
+      this.boost = boost;
     }

     @Override
     public void extractTerms(Set<Term> terms) {}

-    @Override
-    public float getValueForNormalization() throws IOException {
-      return queryWeight * queryWeight;
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      this.queryNorm = norm;
-      this.boost = boost;
-      this.queryWeight = norm * boost;
-    }
-
     @Override
     public Scorer scorer(LeafReaderContext context) throws IOException {
-      return new AllScorer(context, this, queryWeight);
+      return new AllScorer(context, this, boost);
     }

     @Override

@@ -96,14 +84,14 @@ public class FunctionQuery extends Query {
     final IndexReader reader;
     final FunctionWeight weight;
     final int maxDoc;
-    final float qWeight;
+    final float boost;
     final DocIdSetIterator iterator;
     final FunctionValues vals;

-    public AllScorer(LeafReaderContext context, FunctionWeight w, float qWeight) throws IOException {
+    public AllScorer(LeafReaderContext context, FunctionWeight w, float boost) throws IOException {
       super(w);
       this.weight = w;
-      this.qWeight = qWeight;
+      this.boost = boost;
       this.reader = context.reader();
       this.maxDoc = reader.maxDoc();
       iterator = DocIdSetIterator.all(context.reader().maxDoc());

@@ -122,7 +110,7 @@ public class FunctionQuery extends Query {

     @Override
     public float score() throws IOException {
-      float score = qWeight * vals.floatVal(docID());
+      float score = boost * vals.floatVal(docID());

       // Current Lucene priority queues can't handle NaN and -Infinity, so
       // map to -Float.MAX_VALUE. This conditional handles both -infinity

@@ -136,20 +124,19 @@ public class FunctionQuery extends Query {
     }

     public Explanation explain(int doc) throws IOException {
-      float sc = qWeight * vals.floatVal(doc);
+      float sc = boost * vals.floatVal(doc);

       return Explanation.match(sc, "FunctionQuery(" + func + "), product of:",
           vals.explain(doc),
-          Explanation.match(weight.boost, "boost"),
-          Explanation.match(weight.queryNorm, "queryNorm"));
+          Explanation.match(weight.boost, "boost"));
     }

   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
-    return new FunctionQuery.FunctionWeight(searcher);
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    return new FunctionQuery.FunctionWeight(searcher, boost);
   }
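FunctionQuery makes the net scoring effect easy to check by hand: the old per-document score was `queryWeight * value` with `queryWeight = queryNorm * boost`, and with `queryNorm` gone it collapses to `boost * value`. So a function value of 2.5 under a boost of 3.0 now scores exactly 3.0 * 2.5 = 7.5, and the explanation lists two factors (function value and boost) where it used to list three. In sketch form:

    // Hedged sketch of the effective FunctionQuery score after LUCENE-7368:
    float score = boost * vals.floatVal(docID()); // e.g. 3.0f * 2.5f == 7.5f
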
@@ -114,7 +114,7 @@ public class FunctionRangeQuery extends Query {
   }

   @Override
-  public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
+  public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
     return new FunctionRangeWeight(searcher);
   }

@@ -133,17 +133,6 @@ public class FunctionRangeQuery extends Query {
       //none
     }

-    //Note: this uses the functionValue's floatVal() as the score; queryNorm/boost is ignored.
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return 1f;
-    }
-
-    @Override
-    public void normalize(float norm, float topLevelBoost) {
-      //no-op
-    }
-
    @Override
    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
      FunctionValues functionValues = valueSource.getValues(vsContext, context);