LUCENE-6590: Replace Query.getBoost, setBoost and clone with a new BoostQuery.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1701621 13f79535-47bb-0310-9956-ffa450edef68
Adrien Grand 2015-09-07 13:34:46 +00:00
parent 92d3ef3462
commit 962313b83b
233 changed files with 1728 additions and 1651 deletions

View File

@ -73,6 +73,12 @@ New Features
fast, very accurate query to find all indexed points within an
earth-surface shape (Karl Wright, Mike McCandless)
API Changes
* LUCENE-6590: Query.setBoost(), Query.getBoost() and Query.clone() are gone.
In order to apply boosts, you now need to wrap queries in a BoostQuery.
(Adrien Grand)
Optimizations
* LUCENE-6708: TopFieldCollector does not compute the score several times on the

View File

@ -63,3 +63,18 @@ AttributeImpl removed the default, reflection-based implementation of
reflectWith(AttributeReflector). The method was made abstract. If you have
implemented your own attribute, make sure to add the required method signature.
See the Javadocs for an example.
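As a rough sketch of what such an implementation looks like (ColorAttribute is a
made-up attribute, purely for illustration), the attribute now has to report its
properties to the reflector explicitly:

import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;

// Made-up attribute, only to illustrate the now-abstract reflectWith().
interface ColorAttribute extends Attribute {
  void setColor(String color);
  String getColor();
}

final class ColorAttributeImpl extends AttributeImpl implements ColorAttribute {
  private String color = "";

  @Override public void setColor(String color) { this.color = color; }
  @Override public String getColor() { return color; }

  @Override public void clear() { color = ""; }

  @Override public void copyTo(AttributeImpl target) {
    ((ColorAttribute) target).setColor(color);
  }

  // Previously inherited through reflection, now implemented explicitly:
  // report each property of this attribute to the reflector.
  @Override public void reflectWith(AttributeReflector reflector) {
    reflector.reflect(ColorAttribute.class, "color", color);
  }
}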
## Query.setBoost() and Query.clone() are removed (LUCENE-6590)
Query.setBoost has been removed. In order to apply a boost to a Query, you now
need to wrap it inside a BoostQuery. For instance,
Query q = ...;
float boost = ...;
q = new BoostQuery(q, boost);
would be equivalent to the following code with the old setBoost API:
Query q = ...;
float boost = ...;
q.setBoost(q.getBoost() * boost);
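Query.getBoost() is gone as well, so code that used to read a boost back from a
query now has to check for the wrapper explicitly. A minimal sketch:

Query q = ...;
float boost = 1f;
if (q instanceof BoostQuery) {
  BoostQuery bq = (BoostQuery) q;
  boost = bq.getBoost();
  q = bq.getQuery(); // unwrap to get the query the boost applies to
}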

View File

@ -18,13 +18,11 @@ package org.apache.lucene.search;
*/
import java.io.IOException;
import java.util.Objects;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.automaton.Operations;
@ -149,7 +147,6 @@ public class AutomatonQuery extends MultiTermQuery {
buffer.append('\n');
buffer.append(automaton.toString());
buffer.append("}");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -252,8 +252,10 @@ public final class BlendedTermQuery extends Query {
if (i != 0) {
builder.append(" ");
}
TermQuery termQuery = new TermQuery(terms[i]);
termQuery.setBoost(boosts[i]);
Query termQuery = new TermQuery(terms[i]);
if (boosts[i] != 1f) {
termQuery = new BoostQuery(termQuery, boosts[i]);
}
builder.append(termQuery.toString(field));
}
builder.append(")");
@ -287,14 +289,14 @@ public final class BlendedTermQuery extends Query {
contexts[i] = adjustFrequencies(contexts[i], df, ttf);
}
TermQuery[] termQueries = new TermQuery[terms.length];
Query[] termQueries = new Query[terms.length];
for (int i = 0; i < terms.length; ++i) {
termQueries[i] = new TermQuery(terms[i], contexts[i]);
termQueries[i].setBoost(boosts[i]);
if (boosts[i] != 1f) {
termQueries[i] = new BoostQuery(termQueries[i], boosts[i]);
}
}
Query rewritten = rewriteMethod.rewrite(termQueries);
rewritten.setBoost(getBoost());
return rewritten;
return rewriteMethod.rewrite(termQueries);
}
private static TermContext adjustFrequencies(TermContext ctx, int artificialDf, long artificialTtf) {

View File

@ -28,7 +28,6 @@ import java.util.Objects;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.ToStringUtils;
/** A Query that matches documents matching boolean combinations of other
* queries, e.g. {@link TermQuery}s, {@link PhraseQuery}s or other
@ -203,22 +202,11 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
BooleanClause c = clauses.get(0);
if (!c.isProhibited()) { // just return clause
Query query = c.getQuery().rewrite(reader); // rewrite first
Query query = c.getQuery();
if (c.isScoring()) {
if (getBoost() != 1.0f) { // incorporate boost
if (query == c.getQuery()) { // if rewrite was no-op
query = query.clone(); // then clone before boost
}
// Since the BooleanQuery only has 1 clause, the BooleanQuery will be
// written out. Therefore the rewritten Query's boost must incorporate both
// the clause's boost, and the boost of the BooleanQuery itself
query.setBoost(getBoost() * query.getBoost());
}
} else {
// our single clause is a filter
query = new ConstantScoreQuery(query);
query.setBoost(0);
if (c.isScoring() == false) {
// our single clause is a filter, so we need to disable scoring
query = new BoostQuery(new ConstantScoreQuery(query), 0);
}
return query;
@ -238,9 +226,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
builder.add(rewritten, clause.getOccur());
}
if (actuallyRewritten) {
BooleanQuery rewritten = builder.build();
rewritten.setBoost(getBoost());
return rewritten;
return builder.build();
}
return super.rewrite(reader);
}
@ -249,7 +235,7 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
boolean needParens= getBoost() != 1.0 || getMinimumNumberShouldMatch() > 0;
boolean needParens = getMinimumNumberShouldMatch() > 0;
if (needParens) {
buffer.append("(");
}
@ -282,10 +268,6 @@ public class BooleanQuery extends Query implements Iterable<BooleanClause> {
buffer.append(getMinimumNumberShouldMatch());
}
if (getBoost() != 1.0f) {
buffer.append(ToStringUtils.boost(getBoost()));
}
return buffer.toString();
}

View File

@ -43,6 +43,7 @@ final class BooleanWeight extends Weight {
final boolean disableCoord;
final boolean needsScores;
final float coords[];
float boost = 1f;
BooleanWeight(BooleanQuery query, IndexSearcher searcher, boolean needsScores, boolean disableCoord) throws IOException {
super(query);
@ -105,7 +106,7 @@ final class BooleanWeight extends Weight {
i += 1;
}
sum *= query.getBoost() * query.getBoost(); // boost each sub-weight
sum *= boost * boost; // boost each sub-weight
return sum ;
}
@ -127,11 +128,11 @@ final class BooleanWeight extends Weight {
}
@Override
public void normalize(float norm, float topLevelBoost) {
topLevelBoost *= query.getBoost(); // incorporate boost
public void normalize(float norm, float boost) {
this.boost = boost;
for (Weight w : weights) {
// normalize all clauses (even if non-scoring, in case of side effects)
w.normalize(norm, topLevelBoost);
w.normalize(norm, boost);
}
}

View File

@ -0,0 +1,181 @@
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
/**
* A {@link Query} wrapper that allows giving a boost to the wrapped query.
* Boost values that are less than one give less importance to this
* query compared to other ones, while values that are greater than one
* give more importance to the scores returned by this query.
*/
public final class BoostQuery extends Query {
/** By default we enclose the wrapped query within parentheses, but this is
* not required for all queries, so we use a whitelist of queries that don't
* need parentheses to have a better toString(). */
private static final Set<Class<? extends Query>> NO_PARENS_REQUIRED_QUERIES = Collections.unmodifiableSet(
new HashSet<>(Arrays.asList(
TermQuery.class,
PhraseQuery.class,
MultiPhraseQuery.class,
ConstantScoreQuery.class,
TermRangeQuery.class,
NumericRangeQuery.class,
PrefixQuery.class,
FuzzyQuery.class,
WildcardQuery.class,
RegexpQuery.class
)));
private final Query query;
private final float boost;
/** Sole constructor: wrap {@code query} in such a way that the produced
* scores will be boosted by {@code boost}. */
public BoostQuery(Query query, float boost) {
this.query = Objects.requireNonNull(query);
this.boost = boost;
}
/**
* Return the wrapped {@link Query}.
*/
public Query getQuery() {
return query;
}
/**
* Return the applied boost.
*/
public float getBoost() {
return boost;
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) {
return false;
}
BoostQuery that = (BoostQuery) obj;
return query.equals(that.query)
&& Float.floatToIntBits(boost) == Float.floatToIntBits(that.boost);
}
@Override
public int hashCode() {
int h = super.hashCode();
h = 31 * h + query.hashCode();
h = 31 * h + Float.floatToIntBits(boost);
return h;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
final Query rewritten = query.rewrite(reader);
if (boost == 1f) {
return rewritten;
}
if (rewritten.getClass() == BoostQuery.class) {
BoostQuery in = (BoostQuery) rewritten;
return new BoostQuery(in.query, boost * in.boost);
}
if (boost == 0f && rewritten.getClass() != ConstantScoreQuery.class) {
// so that we pass needsScores=false
return new BoostQuery(new ConstantScoreQuery(rewritten), 0f);
}
if (query != rewritten) {
return new BoostQuery(rewritten, boost);
}
return super.rewrite(reader);
}
@Override
public String toString(String field) {
boolean needsParens = NO_PARENS_REQUIRED_QUERIES.contains(query.getClass()) == false;
StringBuilder builder = new StringBuilder();
if (needsParens) {
builder.append("(");
}
builder.append(query.toString(field));
if (needsParens) {
builder.append(")");
}
builder.append("^");
builder.append(boost);
return builder.toString();
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final Weight weight = query.createWeight(searcher, needsScores);
if (needsScores == false) {
return weight;
}
// Apply the query boost; this may impact the return value of getValueForNormalization()
weight.normalize(1f, boost);
return new Weight(this) {
@Override
public void extractTerms(Set<Term> terms) {
weight.extractTerms(terms);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return weight.explain(context, doc);
}
@Override
public float getValueForNormalization() throws IOException {
return weight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
weight.normalize(norm, BoostQuery.this.boost * boost);
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
return weight.scorer(context);
}
@Override
public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
return weight.bulkScorer(context);
}
};
}
}
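As a usage sketch against the class above (field and term names are arbitrary):
boosts are expressed by wrapping, and nested wrappers collapse into a single
boost when the query is rewritten, as implemented in rewrite() above.

import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class BoostQueryUsage {
  // Give the "title" clause twice the weight of the "body" clause.
  static Query titleWeighted() {
    Query title = new BoostQuery(new TermQuery(new Term("title", "lucene")), 2f);
    Query body = new TermQuery(new Term("body", "lucene"));
    return new BooleanQuery.Builder()
        .add(title, Occur.SHOULD)
        .add(body, Occur.SHOULD)
        .build();
  }

  // Nested boosts multiply: one rewrite round turns this into a single
  // BoostQuery around the TermQuery with a boost of 2 * 3 = 6.
  static Query nested(IndexReader reader) throws IOException {
    Query term = new TermQuery(new Term("body", "lucene"));
    return new BoostQuery(new BoostQuery(term, 2f), 3f).rewrite(reader);
  }
}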

View File

@ -41,7 +41,7 @@ import org.apache.lucene.util.RoaringDocIdSet;
* needed. The purpose is to allow queries to simply care about matching and
* scoring, and then wrap with this class to add caching.
*/
public class CachingWrapperQuery extends Query implements Accountable {
public class CachingWrapperQuery extends Query implements Accountable, Cloneable {
private Query query; // not final because of clone
private final QueryCachingPolicy policy;
private final Map<Object,DocIdSet> cache = Collections.synchronizedMap(new WeakHashMap<Object,DocIdSet>());
@ -62,13 +62,6 @@ public class CachingWrapperQuery extends Query implements Accountable {
this(query, QueryCachingPolicy.CacheOnLargeSegments.DEFAULT);
}
@Override
public CachingWrapperQuery clone() {
final CachingWrapperQuery clone = (CachingWrapperQuery) super.clone();
clone.query = query.clone();
return clone;
}
/**
* Gets the contained query.
* @return the contained query.
@ -77,16 +70,6 @@ public class CachingWrapperQuery extends Query implements Accountable {
return query;
}
@Override
public float getBoost() {
return query.getBoost();
}
@Override
public void setBoost(float b) {
query.setBoost(b);
}
/**
* Default cache implementation: uses {@link RoaringDocIdSet}.
*/
@ -98,11 +81,16 @@ public class CachingWrapperQuery extends Query implements Accountable {
public Query rewrite(IndexReader reader) throws IOException {
final Query rewritten = query.rewrite(reader);
if (query == rewritten) {
return this;
return super.rewrite(reader);
} else {
CachingWrapperQuery clone = clone();
clone.query = rewritten;
return clone;
CachingWrapperQuery clone;
try {
clone = (CachingWrapperQuery) clone();
clone.query = rewritten;
return clone;
} catch (CloneNotSupportedException e) {
throw new AssertionError(e);
}
}
}

View File

@ -25,18 +25,17 @@ import java.util.Objects;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
/**
* A query that wraps another query and simply returns a constant score equal to the
* query boost for every document that matches the query.
* It therefore simply strips of all scores and returns a constant one.
* A query that wraps another query and simply returns a constant score equal to
* 1 for every document that matches the query.
* It therefore simply strips off all scores and always returns 1.
*/
public class ConstantScoreQuery extends Query {
protected final Query query;
public final class ConstantScoreQuery extends Query {
private final Query query;
/** Strips off scores from the passed in Query. The hits will get a constant score
* dependent on the boost factor of this query. */
* of 1. */
public ConstantScoreQuery(Query query) {
this.query = Objects.requireNonNull(query, "Query must not be null");
}
@ -50,21 +49,19 @@ public class ConstantScoreQuery extends Query {
public Query rewrite(IndexReader reader) throws IOException {
Query rewritten = query.rewrite(reader);
if (rewritten.getClass() == getClass()) {
if (getBoost() != rewritten.getBoost()) {
rewritten = rewritten.clone();
rewritten.setBoost(getBoost());
}
return rewritten;
}
if (rewritten != query) {
rewritten = new ConstantScoreQuery(rewritten);
rewritten.setBoost(this.getBoost());
return new ConstantScoreQuery(rewritten);
}
if (rewritten.getClass() == ConstantScoreQuery.class) {
return rewritten;
}
return this;
if (rewritten.getClass() == BoostQuery.class) {
return new ConstantScoreQuery(((BoostQuery) rewritten).getQuery());
}
return super.rewrite(reader);
}
/** We return this as our {@link BulkScorer} so that if the CSQ
@ -161,7 +158,6 @@ public class ConstantScoreQuery extends Query {
return new StringBuilder("ConstantScore(")
.append(query.toString(field))
.append(')')
.append(ToStringUtils.boost(getBoost()))
.toString();
}
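A small sketch (the term is arbitrary): the constant score is now always 1, and
the old "constant score equal to the query boost" behaviour is roughly recovered
by wrapping the ConstantScoreQuery in a BoostQuery.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

class ConstantScoreUsage {
  // Every matching document gets the same score of 1.
  static Query flat() {
    return new ConstantScoreQuery(new TermQuery(new Term("body", "lucene")));
  }

  // For a constant score other than 1, boost the wrapper: the constant 1 is
  // multiplied by the boost, much like the old setBoost-based behaviour.
  static Query flatHalf() {
    return new BoostQuery(
        new ConstantScoreQuery(new TermQuery(new Term("body", "lucene"))), 0.5f);
  }
}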

View File

@ -32,13 +32,13 @@ import org.apache.lucene.index.Term;
*/
public abstract class ConstantScoreWeight extends Weight {
private float boost;
private float queryNorm;
private float queryWeight;
protected ConstantScoreWeight(Query query) {
super(query);
queryWeight = getQuery().getBoost();
queryNorm = 1f;
normalize(1f, 1f);
}
@Override
@ -54,9 +54,20 @@ public abstract class ConstantScoreWeight extends Weight {
}
@Override
public final void normalize(float norm, float topLevelBoost) {
queryNorm = norm * topLevelBoost;
queryWeight *= queryNorm;
public void normalize(float norm, float boost) {
this.boost = boost;
queryNorm = norm;
queryWeight = queryNorm * boost;
}
/** Return the normalization factor for this weight. */
protected final float queryNorm() {
return queryNorm;
}
/** Return the boost for this weight. */
protected final float boost() {
return boost;
}
/** Return the score produced by this {@link Weight}. */
@ -65,7 +76,7 @@ public abstract class ConstantScoreWeight extends Weight {
}
@Override
public final Explanation explain(LeafReaderContext context, int doc) throws IOException {
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
final Scorer s = scorer(context);
final boolean exists;
if (s == null) {
@ -82,7 +93,7 @@ public abstract class ConstantScoreWeight extends Weight {
if (exists) {
return Explanation.match(
queryWeight, getQuery().toString() + ", product of:",
Explanation.match(getQuery().getBoost(), "boost"), Explanation.match(queryNorm, "queryNorm"));
Explanation.match(boost, "boost"), Explanation.match(queryNorm, "queryNorm"));
} else {
return Explanation.noMatch(getQuery().toString() + " doesn't match id " + doc);
}

View File

@ -42,7 +42,7 @@ import org.apache.lucene.index.Term;
* include this term in only the best of those multiple fields, without confusing this with the better case of two different terms
* in the multiple fields.
*/
public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
public final class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/* The subqueries */
private ArrayList<Query> disjuncts = new ArrayList<>();
@ -118,6 +118,7 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
/** The Weights for our subqueries, in 1-1 correspondence with disjuncts */
protected final ArrayList<Weight> weights = new ArrayList<>(); // The Weight's for our subqueries, in 1-1 correspondence with disjuncts
private final boolean needsScores;
private float boost;
/** Construct the Weight for this Query searched by searcher. Recursively construct subquery weights. */
public DisjunctionMaxWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
@ -126,6 +127,7 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
weights.add(searcher.createWeight(disjunctQuery, needsScores));
}
this.needsScores = needsScores;
this.boost = 1f;
}
@Override
@ -145,16 +147,15 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
max = Math.max(max, sub);
}
float boost = getBoost();
return (((sum - max) * tieBreakerMultiplier * tieBreakerMultiplier) + max) * boost * boost;
}
/** Apply the computed normalization factor to our subqueries */
@Override
public void normalize(float norm, float topLevelBoost) {
topLevelBoost *= getBoost(); // Incorporate our boost
public void normalize(float norm, float boost) {
this.boost = boost;
for (Weight wt : weights) {
wt.normalize(norm, topLevelBoost);
wt.normalize(norm, boost);
}
}
@ -219,34 +220,20 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
public Query rewrite(IndexReader reader) throws IOException {
int numDisjunctions = disjuncts.size();
if (numDisjunctions == 1) {
Query singleton = disjuncts.get(0);
Query result = singleton.rewrite(reader);
if (getBoost() != 1.0f) {
if (result == singleton) result = result.clone();
result.setBoost(getBoost() * result.getBoost());
}
return result;
return disjuncts.get(0);
}
DisjunctionMaxQuery clone = null;
for (int i = 0 ; i < numDisjunctions; i++) {
Query clause = disjuncts.get(i);
Query rewrite = clause.rewrite(reader);
if (rewrite != clause) {
if (clone == null) clone = this.clone();
clone.disjuncts.set(i, rewrite);
}
DisjunctionMaxQuery rewritten = new DisjunctionMaxQuery(tieBreakerMultiplier);
boolean actuallyRewritten = false;
for (Query sub : disjuncts) {
Query rewrittenSub = sub.rewrite(reader);
actuallyRewritten |= rewrittenSub != sub;
rewritten.add(rewrittenSub);
}
if (clone != null) return clone;
else return this;
}
/** Create a shallow copy of us -- used in rewriting if necessary
* @return a copy of us (but reuse, don't copy, our subqueries) */
@Override @SuppressWarnings("unchecked")
public DisjunctionMaxQuery clone() {
DisjunctionMaxQuery clone = (DisjunctionMaxQuery)super.clone();
clone.disjuncts = (ArrayList<Query>) this.disjuncts.clone();
return clone;
if (actuallyRewritten) {
return rewritten;
}
return super.rewrite(reader);
}
/** Prettyprint us.
@ -273,10 +260,6 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
buffer.append("~");
buffer.append(tieBreakerMultiplier);
}
if (getBoost() != 1.0) {
buffer.append("^");
buffer.append(getBoost());
}
return buffer.toString();
}
@ -298,9 +281,10 @@ public class DisjunctionMaxQuery extends Query implements Iterable<Query> {
*/
@Override
public int hashCode() {
return Float.floatToIntBits(getBoost())
+ Float.floatToIntBits(tieBreakerMultiplier)
+ disjuncts.hashCode();
int h = super.hashCode();
h = 31 * h + Float.floatToIntBits(tieBreakerMultiplier);
h = 31 * h + disjuncts.hashCode();
return h;
}

View File

@ -18,7 +18,6 @@ package org.apache.lucene.search;
*/
import java.io.IOException;
import java.util.Objects;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
@ -39,9 +38,7 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
@Override
public Query rewrite(IndexReader reader, MultiTermQuery query) {
Query result = new ConstantScoreQuery(new MultiTermQueryDocValuesWrapper(query));
result.setBoost(query.getBoost());
return result;
return new ConstantScoreQuery(new MultiTermQueryDocValuesWrapper(query));
}
static class MultiTermQueryDocValuesWrapper extends Query {
@ -63,18 +60,16 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
@Override
public final boolean equals(final Object o) {
if (o==this) return true;
if (o==null) return false;
if (this.getClass().equals(o.getClass())) {
final MultiTermQueryDocValuesWrapper that = (MultiTermQueryDocValuesWrapper) o;
return this.query.equals(that.query) && this.getBoost() == that.getBoost();
if (super.equals(o) == false) {
return false;
}
return false;
MultiTermQueryDocValuesWrapper that = (MultiTermQueryDocValuesWrapper) o;
return query.equals(that.query);
}
@Override
public final int hashCode() {
return Objects.hash(getClass(), query, getBoost());
return 31 * super.hashCode() + query.hashCode();
}
/** Returns the field name for this query */

View File

@ -23,7 +23,6 @@ import java.util.Objects;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
/**
* A {@link Query} that matches documents that have a value for a given field
@ -41,21 +40,21 @@ public final class FieldValueQuery extends Query {
@Override
public boolean equals(Object obj) {
if (obj instanceof FieldValueQuery == false) {
if (super.equals(obj) == false) {
return false;
}
final FieldValueQuery that = (FieldValueQuery) obj;
return super.equals(obj) && field.equals(that.field);
return field.equals(that.field);
}
@Override
public int hashCode() {
return Objects.hash(getClass(), field, getBoost());
return 31 * super.hashCode() + field.hashCode();
}
@Override
public String toString(String field) {
return "FieldValueQuery [field=" + this.field + "]" + ToStringUtils.boost(getBoost());
return "FieldValueQuery [field=" + this.field + "]";
}
@Override

View File

@ -92,7 +92,7 @@ public abstract class Filter extends Query {
}
@Override
public void normalize(float norm, float topLevelBoost) {}
public void normalize(float norm, float boost) {}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {

View File

@ -24,7 +24,6 @@ import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.util.automaton.LevenshteinAutomata;
/** Implements the fuzzy search query. The similarity measurement
@ -172,7 +171,6 @@ public class FuzzyQuery extends MultiTermQuery {
buffer.append(term.text());
buffer.append('~');
buffer.append(Integer.toString(maxEdits));
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -87,7 +87,7 @@ public class IndexSearcher {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
return new SimWeight() {
@Override
@ -96,7 +96,7 @@ public class IndexSearcher {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {}
public void normalize(float queryNorm, float boost) {}
};
}

View File

@ -216,7 +216,8 @@ public class LRUQueryCache implements QueryCache, Accountable {
}
synchronized DocIdSet get(Query key, LeafReaderContext context) {
key = QueryCache.cacheKey(key);
assert key instanceof BoostQuery == false;
assert key instanceof ConstantScoreQuery == false;
final Object readerKey = context.reader().getCoreCacheKey();
final LeafCache leafCache = cache.get(readerKey);
if (leafCache == null) {
@ -241,9 +242,8 @@ public class LRUQueryCache implements QueryCache, Accountable {
synchronized void putIfAbsent(Query query, LeafReaderContext context, DocIdSet set) {
// under a lock to make sure that mostRecentlyUsedQueries and cache remain sync'ed
// we don't want to have user-provided queries as keys in our cache since queries are mutable
query = query.clone();
query.setBoost(1f);
assert query == QueryCache.cacheKey(query);
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
Query singleton = uniqueQueries.putIfAbsent(query, query);
if (singleton == null) {
onQueryCache(singleton, LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY + ramBytesUsed(query));
@ -306,7 +306,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
* Remove all cache entries for the given query.
*/
public synchronized void clearQuery(Query query) {
final Query singleton = uniqueQueries.remove(QueryCache.cacheKey(query));
final Query singleton = uniqueQueries.remove(query);
if (singleton != null) {
onEviction(singleton);
}
@ -510,12 +510,14 @@ public class LRUQueryCache implements QueryCache, Accountable {
}
DocIdSet get(Query query) {
assert query == QueryCache.cacheKey(query);
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
return cache.get(query);
}
void putIfAbsent(Query query, DocIdSet set) {
assert query == QueryCache.cacheKey(query);
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
if (cache.putIfAbsent(query, set) == null) {
// the set was actually put
onDocIdSetCache(HASHTABLE_RAM_BYTES_PER_ENTRY + set.ramBytesUsed());
@ -523,7 +525,8 @@ public class LRUQueryCache implements QueryCache, Accountable {
}
void remove(Query query) {
assert query == QueryCache.cacheKey(query);
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
DocIdSet removed = cache.remove(query);
if (removed != null) {
onDocIdSetEviction(HASHTABLE_RAM_BYTES_PER_ENTRY + removed.ramBytesUsed());

View File

@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
/**
* A query that matches all documents.
@ -70,9 +69,6 @@ public final class MatchAllDocsQuery extends Query {
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
buffer.append("*:*");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
return "*:*";
}
}

View File

@ -20,7 +20,6 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.ToStringUtils;
/**
* A query that matches no documents.
@ -30,17 +29,11 @@ public class MatchNoDocsQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
// Rewrite to an empty BooleanQuery so no Scorer or Weight is required
BooleanQuery.Builder builder = new BooleanQuery.Builder();
Query rewritten = builder.build();
rewritten.setBoost(getBoost());
return rewritten;
return new BooleanQuery.Builder().build();
}
@Override
public String toString(String field) {
StringBuilder buffer = new StringBuilder();
buffer.append("");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
return "";
}
}

View File

@ -35,7 +35,6 @@ import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.ToStringUtils;
/**
* MultiPhraseQuery is a generalized version of PhraseQuery, with an added
@ -147,7 +146,7 @@ public class MultiPhraseQuery extends Query {
allTermStats.add(searcher.termStatistics(term, termContext));
}
}
stats = similarity.computeWeight(getBoost(),
stats = similarity.computeWeight(
searcher.collectionStatistics(field),
allTermStats.toArray(new TermStatistics[allTermStats.size()]));
}
@ -167,8 +166,8 @@ public class MultiPhraseQuery extends Query {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
stats.normalize(queryNorm, topLevelBoost);
public void normalize(float queryNorm, float boost) {
stats.normalize(queryNorm, boost);
}
@Override
@ -251,11 +250,9 @@ public class MultiPhraseQuery extends Query {
}
@Override
public Query rewrite(IndexReader reader) {
public Query rewrite(IndexReader reader) throws IOException {
if (termArrays.isEmpty()) {
MatchNoDocsQuery rewritten = new MatchNoDocsQuery();
rewritten.setBoost(getBoost());
return rewritten;
return new MatchNoDocsQuery();
} else if (termArrays.size() == 1) { // optimize one-term case
Term[] terms = termArrays.get(0);
BooleanQuery.Builder builder = new BooleanQuery.Builder();
@ -263,11 +260,9 @@ public class MultiPhraseQuery extends Query {
for (int i=0; i<terms.length; i++) {
builder.add(new TermQuery(terms[i]), BooleanClause.Occur.SHOULD);
}
BooleanQuery boq = builder.build();
boq.setBoost(getBoost());
return boq;
return builder.build();
} else {
return this;
return super.rewrite(reader);
}
}
@ -322,8 +317,6 @@ public class MultiPhraseQuery extends Query {
buffer.append(slop);
}
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -94,9 +94,7 @@ public abstract class MultiTermQuery extends Query {
public static final RewriteMethod CONSTANT_SCORE_REWRITE = new RewriteMethod() {
@Override
public Query rewrite(IndexReader reader, MultiTermQuery query) {
Query result = new MultiTermQueryConstantScoreWrapper<>(query);
result.setBoost(query.getBoost());
return result;
return new MultiTermQueryConstantScoreWrapper<>(query);
}
};
@ -172,8 +170,7 @@ public abstract class MultiTermQuery extends Query {
@Override
protected void addClause(BooleanQuery.Builder topLevel, Term term, int docCount, float boost, TermContext states) {
final TermQuery tq = new TermQuery(term, states);
tq.setBoost(boost);
topLevel.add(tq, BooleanClause.Occur.SHOULD);
topLevel.add(new BoostQuery(tq, boost), BooleanClause.Occur.SHOULD);
}
}
@ -271,8 +268,7 @@ public abstract class MultiTermQuery extends Query {
@Override
protected void addClause(BooleanQuery.Builder topLevel, Term term, int docFreq, float boost, TermContext states) {
final Query q = new ConstantScoreQuery(new TermQuery(term, states));
q.setBoost(boost);
topLevel.add(q, BooleanClause.Occur.SHOULD);
topLevel.add(new BoostQuery(q, boost), BooleanClause.Occur.SHOULD);
}
}
@ -337,7 +333,6 @@ public abstract class MultiTermQuery extends Query {
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Float.floatToIntBits(getBoost());
result = prime * result + rewriteMethod.hashCode();
if (field != null) result = prime * result + field.hashCode();
return result;

View File

@ -96,7 +96,7 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
return false;
}
final MultiTermQueryConstantScoreWrapper<?> that = (MultiTermQueryConstantScoreWrapper<?>) o;
return this.query.equals(that.query) && this.getBoost() == that.getBoost();
return this.query.equals(that.query);
}
@Override
@ -157,8 +157,9 @@ final class MultiTermQueryConstantScoreWrapper<Q extends MultiTermQuery> extends
bq.add(new TermQuery(new Term(query.field, t.term), termContext), Occur.SHOULD);
}
Query q = new ConstantScoreQuery(bq.build());
q.setBoost(score());
return new WeightOrDocIdSet(searcher.rewrite(q).createWeight(searcher, needsScores));
final Weight weight = searcher.rewrite(q).createWeight(searcher, needsScores);
weight.normalize(1f, score());
return new WeightOrDocIdSet(weight);
}
// Too many terms: go back to the terms we already collected and start building the bit set

View File

@ -74,9 +74,7 @@ public class NGramPhraseQuery extends Query {
builder.add(terms[i], i);
}
}
PhraseQuery rewritten = builder.build();
rewritten.setBoost(phraseQuery.getBoost());
return rewritten;
return builder.build();
}
@Override
@ -106,16 +104,6 @@ public class NGramPhraseQuery extends Query {
return phraseQuery.getPositions();
}
@Override
public float getBoost() {
return phraseQuery.getBoost();
}
@Override
public void setBoost(float b) {
phraseQuery.setBoost(b);
}
@Override
public String toString(String field) {
return phraseQuery.toString(field);

View File

@ -33,7 +33,6 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.index.Term; // for javadocs
/**
@ -324,7 +323,6 @@ public final class NumericRangeQuery<T extends Number> extends MultiTermQuery {
.append(" TO ")
.append((max == null) ? "*" : max.toString())
.append(maxInclusive ? ']' : '}')
.append(ToStringUtils.boost(getBoost()))
.toString();
}

View File

@ -38,7 +38,6 @@ import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
/** A Query that matches documents containing a particular sequence of terms.
* A PhraseQuery is built by QueryParser for input like <code>"new york"</code>.
@ -265,21 +264,15 @@ public class PhraseQuery extends Query {
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (terms.length == 0) {
MatchNoDocsQuery q = new MatchNoDocsQuery();
q.setBoost(getBoost());
return q;
return new MatchNoDocsQuery();
} else if (terms.length == 1) {
TermQuery tq = new TermQuery(terms[0]);
tq.setBoost(getBoost());
return tq;
return new TermQuery(terms[0]);
} else if (positions[0] != 0) {
int[] newPositions = new int[positions.length];
for (int i = 0; i < positions.length; ++i) {
newPositions[i] = positions[i] - positions[0];
}
PhraseQuery rewritten = new PhraseQuery(slop, terms, newPositions);
rewritten.setBoost(getBoost());
return rewritten;
return new PhraseQuery(slop, terms, newPositions);
} else {
return super.rewrite(reader);
}
@ -375,7 +368,7 @@ public class PhraseQuery extends Query {
states[i] = TermContext.build(context, term);
termStats[i] = searcher.termStatistics(term, states[i]);
}
stats = similarity.computeWeight(getBoost(), searcher.collectionStatistics(field), termStats);
stats = similarity.computeWeight(searcher.collectionStatistics(field), termStats);
}
@Override
@ -392,8 +385,8 @@ public class PhraseQuery extends Query {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
stats.normalize(queryNorm, topLevelBoost);
public void normalize(float queryNorm, float boost) {
stats.normalize(queryNorm, boost);
}
@Override
@ -514,8 +507,6 @@ public class PhraseQuery extends Query {
buffer.append(slop);
}
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -19,7 +19,6 @@ package org.apache.lucene.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.util.automaton.Automaton;
/** A Query that matches documents containing terms with a specified prefix. A PrefixQuery
@ -71,7 +70,6 @@ public class PrefixQuery extends AutomatonQuery {
}
buffer.append(term.text());
buffer.append('*');
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -41,20 +41,7 @@ import org.apache.lucene.index.IndexReader;
<p>See also the family of {@link org.apache.lucene.search.spans Span Queries}
and additional queries available in the <a href="{@docRoot}/../queries/overview-summary.html">Queries module</a>
*/
public abstract class Query implements Cloneable {
private float boost = 1.0f; // query boost factor
/** Sets the boost for this query clause to <code>b</code>. Documents
* matching this clause will (in addition to the normal weightings) have
* their score multiplied by <code>b</code>.
*/
public void setBoost(float b) { boost = b; }
/** Gets the boost for this clause. Documents matching
* this clause will (in addition to the normal weightings) have their score
* multiplied by <code>b</code>. The boost is 1.0 by default.
*/
public float getBoost() { return boost; }
public abstract class Query {
/** Prints a query to a string, with <code>field</code> assumed to be the
* default field and omitted.
@ -87,19 +74,9 @@ public abstract class Query implements Cloneable {
return this;
}
/** Returns a clone of this query. */
@Override
public Query clone() {
try {
return (Query)super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeException("Clone not supported: " + e.getMessage());
}
}
@Override
public int hashCode() {
return Float.floatToIntBits(getBoost()) ^ getClass().hashCode();
return getClass().hashCode();
}
@Override
@ -110,9 +87,6 @@ public abstract class Query implements Cloneable {
return false;
if (getClass() != obj.getClass())
return false;
Query other = (Query) obj;
if (Float.floatToIntBits(boost) != Float.floatToIntBits(other.boost))
return false;
return true;
}
}

View File

@ -25,22 +25,6 @@ package org.apache.lucene.search;
*/
public interface QueryCache {
/**
* Return a key for the given query that only takes matching documents into
* account. Boosts will be ignored.
* @lucene.internal
*/
public static Query cacheKey(Query query) {
if (query.getBoost() == 1f) {
return query;
} else {
Query key = query.clone();
key.setBoost(1f);
assert key == cacheKey(key);
return key;
}
}
/**
* Return a wrapper around the provided <code>weight</code> that will cache
* matching docs per-segment accordingly to the given <code>policy</code>.

View File

@ -46,9 +46,7 @@ public class QueryWrapperFilter extends Filter {
@Override
public Query rewrite(IndexReader reader) throws IOException {
ConstantScoreQuery rewritten = new ConstantScoreQuery(query);
rewritten.setBoost(0);
return rewritten;
return new BoostQuery(new ConstantScoreQuery(query), 0f);
}
/** returns the inner Query */

View File

@ -121,7 +121,6 @@ public class RegexpQuery extends AutomatonQuery {
buffer.append('/');
buffer.append(term.text());
buffer.append('/');
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
}

View File

@ -69,8 +69,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
protected void addClause(BooleanQuery.Builder topLevel, Term term, int docCount,
float boost, TermContext states) {
final TermQuery tq = new TermQuery(term, states);
tq.setBoost(boost);
topLevel.add(tq, BooleanClause.Occur.SHOULD);
topLevel.add(new BoostQuery(tq, boost), BooleanClause.Occur.SHOULD);
}
@Override
@ -95,9 +94,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException {
final Query bq = SCORING_BOOLEAN_REWRITE.rewrite(reader, query);
// strip the scores off
final Query result = new ConstantScoreQuery(bq);
result.setBoost(query.getBoost());
return result;
return new ConstantScoreQuery(bq);
}
};
@ -120,7 +117,7 @@ public abstract class ScoringRewrite<B> extends TermCollectingRewrite<B> {
final int pos = sort[i];
final Term term = new Term(query.getField(), col.terms.get(pos, new BytesRef()));
assert termStates[pos].hasOnlyRealTerms() == false || reader.docFreq(term) == termStates[pos].docFreq();
addClause(builder, term, termStates[pos].docFreq(), query.getBoost() * boost[pos], termStates[pos]);
addClause(builder, term, termStates[pos].docFreq(), boost[pos], termStates[pos]);
}
}
return build(builder);

View File

@ -32,7 +32,6 @@ import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ToStringUtils;
/**
* A Query that matches documents containing a term. This may be combined with
@ -73,7 +72,7 @@ public class TermQuery extends Query {
termStats = new TermStatistics(term.bytes(), docFreq, totalTermFreq);
}
this.stats = similarity.computeWeight(getBoost(), collectionStats, termStats);
this.stats = similarity.computeWeight(collectionStats, termStats);
}
@Override
@ -92,8 +91,8 @@ public class TermQuery extends Query {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
stats.normalize(queryNorm, topLevelBoost);
public void normalize(float queryNorm, float boost) {
stats.normalize(queryNorm, boost);
}
@Override
@ -207,7 +206,6 @@ public class TermQuery extends Query {
buffer.append(":");
}
buffer.append(term.text());
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -19,7 +19,6 @@ package org.apache.lucene.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
@ -121,7 +120,6 @@ public class TermRangeQuery extends AutomatonQuery {
buffer.append(" TO ");
buffer.append(upperTerm != null ? ("*".equals(Term.toString(upperTerm)) ? "\\*" : Term.toString(upperTerm)) : "*");
buffer.append(includeUpper ? ']' : '}');
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -160,7 +160,7 @@ public abstract class TopTermsRewrite<B> extends TermCollectingRewrite<B> {
for (final ScoreTerm st : scoreTerms) {
final Term term = new Term(query.field, st.bytes.toBytesRef());
addClause(b, term, st.termState.docFreq(), query.getBoost() * st.boost, st.termState); // add to query
addClause(b, term, st.termState.docFreq(), st.boost, st.termState); // add to query
}
return build(b);
}

View File

@ -98,9 +98,12 @@ public final class UsageTrackingQueryCachingPolicy implements QueryCachingPolicy
@Override
public void onUse(Query query) {
// call possible Query clone and hashCode outside of sync block
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
// call hashCode outside of sync block
// in case it's somewhat expensive:
int hashCode = QueryCache.cacheKey(query).hashCode();
int hashCode = query.hashCode();
// we only track hash codes to avoid holding references to possibly
// large queries; this may cause rare false positives, but at worst
@ -111,10 +114,12 @@ public final class UsageTrackingQueryCachingPolicy implements QueryCachingPolicy
}
int frequency(Query query) {
assert query instanceof BoostQuery == false;
assert query instanceof ConstantScoreQuery == false;
// call possible Query clone and hashCode outside of sync block
// call hashCode outside of sync block
// in case it's somewhat expensive:
int hashCode = QueryCache.cacheKey(query).hashCode();
int hashCode = query.hashCode();
synchronized (this) {
return recentlyUsedFilters.frequency(hashCode);

View File

@ -94,8 +94,8 @@ public abstract class Weight {
/** The value for normalization of contained query clauses (e.g. sum of squared weights). */
public abstract float getValueForNormalization() throws IOException;
/** Assigns the query normalization factor and boost from parent queries to this. */
public abstract void normalize(float norm, float topLevelBoost);
/** Assigns the query normalization factor and boost to this. */
public abstract void normalize(float norm, float boost);
/**
* Returns a {@link Scorer} which can iterate in order over all matching

View File

@ -21,7 +21,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.ToStringUtils;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
@ -121,7 +120,6 @@ public class WildcardQuery extends AutomatonQuery {
buffer.append(":");
}
buffer.append(term.text());
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
}

View File

@ -274,8 +274,8 @@
* <li><b>Index-time boost</b> by calling
* {@link org.apache.lucene.document.Field#setBoost(float) Field.setBoost()} before a document is
* added to the index.</li>
* <li><b>Query-time boost</b> by setting a boost on a query clause, calling
* {@link org.apache.lucene.search.Query#setBoost(float) Query.setBoost()}.</li>
* <li><b>Query-time boost</b> by applying a boost to a query by wrapping with
* {@link org.apache.lucene.search.BoostQuery}.</li>
* </ul>
* <p>Indexing time boosts are pre-processed for storage efficiency and written to
* storage for a field as follows:
@ -366,7 +366,7 @@
* <li>{@link org.apache.lucene.search.Query#rewrite(org.apache.lucene.index.IndexReader) rewrite(IndexReader reader)} &mdash; Rewrites queries into primitive queries. Primitive queries are:
* {@link org.apache.lucene.search.TermQuery TermQuery},
* {@link org.apache.lucene.search.BooleanQuery BooleanQuery}, <span
* >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean)}</span></li>
* >and other queries that implement {@link org.apache.lucene.search.Query#createWeight(IndexSearcher,boolean) createWeight(IndexSearcher searcher,boolean,float)}</span></li>
* </ol>
* <a name="weightClass"></a>
* <h3>The Weight Interface</h3>
@ -389,10 +389,10 @@
* For example, with {@link org.apache.lucene.search.similarities.TFIDFSimilarity Lucene's classic vector-space formula}, this
* is implemented as the sum of squared weights: <code>(idf * boost)<sup>2</sup></code></li>
* <li>
* {@link org.apache.lucene.search.Weight#normalize(float,float) normalize(float norm, float topLevelBoost)} &mdash;
* {@link org.apache.lucene.search.Weight#normalize(float,float) normalize(float norm, float boost)} &mdash;
* Performs query normalization:
* <ul>
* <li><code>topLevelBoost</code>: A query-boost factor from any wrapping queries that should be multiplied into every
* <li><code>boost</code>: A query-boost factor from any wrapping queries that should be multiplied into every
* document's score. For example, a TermQuery that is wrapped within a BooleanQuery with a boost of <code>5</code> would
* receive this value at this time. This allows the TermQuery (the leaf node in this case) to compute this up-front
* a single time (e.g. by multiplying into the IDF), rather than for every document.</li>
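A sketch of that flow with the new API (field, term and boost value are
arbitrary; the exact plumbing lives in IndexSearcher): the wrapping boost of 5
is expressed with a BoostQuery and reaches the leaf Weight through normalize().

import java.io.IOException;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;

class NormalizationSketch {
  static Weight weightFor(IndexSearcher searcher) throws IOException {
    // A TermQuery inside a query that carries a boost of 5, expressed with
    // the BoostQuery wrapper instead of the removed setBoost().
    Query term = new TermQuery(new Term("body", "lucene"));
    Query bq = new BooleanQuery.Builder().add(term, Occur.MUST).build();
    Query boosted = new BoostQuery(bq, 5f);

    // The normalization handshake described above: compute the sum of squared
    // weights, derive the query norm, then push norm and boost down the tree.
    Weight w = searcher.createWeight(searcher.rewrite(boosted), true);
    float sumOfSquares = w.getValueForNormalization();
    float queryNorm = searcher.getSimilarity(true).queryNorm(sumOfSquares);
    w.normalize(queryNorm, 1f);
    return w;
  }
}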

View File

@ -100,11 +100,9 @@ public class PayloadSpanUtil {
}
SpanNearQuery sp = new SpanNearQuery(clauses, slop, inorder);
sp.setBoost(query.getBoost());
getPayloads(payloads, sp);
} else if (query instanceof TermQuery) {
SpanTermQuery stq = new SpanTermQuery(((TermQuery) query).getTerm());
stq.setBoost(query.getBoost());
getPayloads(payloads, stq);
} else if (query instanceof SpanQuery) {
getPayloads(payloads, (SpanQuery) query);
@ -163,7 +161,6 @@ public class PayloadSpanUtil {
SpanNearQuery sp = new SpanNearQuery(clauses, slop + positionGaps,
inorder);
sp.setBoost(query.getBoost());
getPayloads(payloads, sp);
}
}

View File

@ -153,17 +153,9 @@ public class SpanPayloadCheckQuery extends SpanQuery {
buffer.append(';');
}
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
@Override
public SpanPayloadCheckQuery clone() {
SpanPayloadCheckQuery result = new SpanPayloadCheckQuery((SpanQuery) match.clone(), payloadToMatch);
result.setBoost(getBoost());
return result;
}
@Override
public boolean equals(Object o) {
if (! super.equals(o)) {

View File

@ -199,7 +199,7 @@ public class BM25Similarity extends Similarity {
}
@Override
public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
Explanation idf = termStats.length == 1 ? idfExplain(collectionStats, termStats[0]) : idfExplain(collectionStats, termStats);
float avgdl = avgFieldLength(collectionStats);
@ -209,7 +209,7 @@ public class BM25Similarity extends Similarity {
for (int i = 0; i < cache.length; i++) {
cache[i] = k1 * ((1 - b) + b * decodeNormValue((byte)i) / avgdl);
}
return new BM25Stats(collectionStats.field(), idf, queryBoost, avgdl, cache);
return new BM25Stats(collectionStats.field(), idf, avgdl, cache);
}
@Override
@ -260,10 +260,8 @@ public class BM25Similarity extends Similarity {
private final Explanation idf;
/** The average document length. */
private final float avgdl;
/** query's inner boost */
private final float queryBoost;
/** query's outer boost (only for explain) */
private float topLevelBoost;
/** query boost */
private float boost;
/** weight (idf * boost) */
private float weight;
/** field name, for pulling norms */
@ -271,26 +269,25 @@ public class BM25Similarity extends Similarity {
/** precomputed norm[256] with k1 * ((1 - b) + b * dl / avgdl) */
private final float cache[];
BM25Stats(String field, Explanation idf, float queryBoost, float avgdl, float cache[]) {
BM25Stats(String field, Explanation idf, float avgdl, float cache[]) {
this.field = field;
this.idf = idf;
this.queryBoost = queryBoost;
this.avgdl = avgdl;
this.cache = cache;
normalize(1f, 1f);
}
@Override
public float getValueForNormalization() {
// we return a TF-IDF like normalization to be nice, but we don't actually normalize ourselves.
final float queryWeight = idf.getValue() * queryBoost;
return queryWeight * queryWeight;
return weight * weight;
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
public void normalize(float queryNorm, float boost) {
// we don't normalize with queryNorm at all, we just capture the top-level boost
this.topLevelBoost = topLevelBoost;
this.weight = idf.getValue() * queryBoost * topLevelBoost;
this.boost = boost;
this.weight = idf.getValue() * boost;
}
}
@ -315,7 +312,7 @@ public class BM25Similarity extends Similarity {
}
private Explanation explainScore(int doc, Explanation freq, BM25Stats stats, NumericDocValues norms) {
Explanation boostExpl = Explanation.match(stats.queryBoost * stats.topLevelBoost, "boost");
Explanation boostExpl = Explanation.match(stats.boost, "boost");
List<Explanation> subs = new ArrayList<>();
if (boostExpl.getValue() != 1.0f)
subs.add(boostExpl);

View File

@ -38,20 +38,15 @@ public class BasicStats extends Similarity.SimWeight {
// -------------------------- Boost-related stuff --------------------------
/** Query's inner boost. */
protected final float queryBoost;
/** Any outer query's boost. */
protected float topLevelBoost;
/** For most Similarities, the immediate and the top level query boosts are
* not handled differently. Hence, this field is just the product of the
* other two. */
protected float totalBoost;
protected float boost;
/** Constructor. Sets the query boost. */
public BasicStats(String field, float queryBoost) {
/** Constructor. */
public BasicStats(String field) {
this.field = field;
this.queryBoost = queryBoost;
this.totalBoost = queryBoost;
normalize(1f, 1f);
}
// ------------------------- Getter/setter methods -------------------------
@ -128,19 +123,17 @@ public class BasicStats extends Similarity.SimWeight {
* {@link #normalize(float, float)}, etc.
*/
protected float rawNormalizationValue() {
return queryBoost;
return boost;
}
/** No normalization is done. {@code topLevelBoost} is saved in the object,
* however. */
/** No normalization is done. {@code boost} is saved in the object, however. */
@Override
public void normalize(float queryNorm, float topLevelBoost) {
this.topLevelBoost = topLevelBoost;
totalBoost = queryBoost * topLevelBoost;
public void normalize(float queryNorm, float boost) {
this.boost = boost;
}
/** Returns the total boost. */
public float getTotalBoost() {
return totalBoost;
public float getBoost() {
return boost;
}
}

View File

@ -109,15 +109,15 @@ public class DFRSimilarity extends SimilarityBase {
@Override
protected float score(BasicStats stats, float freq, float docLen) {
float tfn = normalization.tfn(stats, freq, docLen);
return stats.getTotalBoost() *
return stats.getBoost() *
basicModel.score(stats, tfn) * afterEffect.score(stats, tfn);
}
@Override
protected void explain(List<Explanation> subs,
BasicStats stats, int doc, float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
if (stats.getBoost() != 1.0f) {
subs.add(Explanation.match(stats.getBoost(), "boost"));
}
Explanation normExpl = normalization.explain(stats, freq, docLen);

View File

@ -96,7 +96,7 @@ public class IBSimilarity extends SimilarityBase {
@Override
protected float score(BasicStats stats, float freq, float docLen) {
return stats.getTotalBoost() *
return stats.getBoost() *
distribution.score(
stats,
normalization.tfn(stats, freq, docLen),
@ -106,8 +106,8 @@ public class IBSimilarity extends SimilarityBase {
@Override
protected void explain(
List<Explanation> subs, BasicStats stats, int doc, float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
if (stats.getBoost() != 1.0f) {
subs.add(Explanation.match(stats.getBoost(), "boost"));
}
Explanation normExpl = normalization.explain(stats, freq, docLen);
Explanation lambdaExpl = lambda.explain(stats);

View File

@ -64,7 +64,7 @@ public class LMDirichletSimilarity extends LMSimilarity {
@Override
protected float score(BasicStats stats, float freq, float docLen) {
float score = stats.getTotalBoost() * (float)(Math.log(1 + freq /
float score = stats.getBoost() * (float)(Math.log(1 + freq /
(mu * ((LMStats)stats).getCollectionProbability())) +
Math.log(mu / (docLen + mu)));
return score > 0.0f ? score : 0.0f;
@ -73,8 +73,8 @@ public class LMDirichletSimilarity extends LMSimilarity {
@Override
protected void explain(List<Explanation> subs, BasicStats stats, int doc,
float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
if (stats.getBoost() != 1.0f) {
subs.add(Explanation.match(stats.getBoost(), "boost"));
}
subs.add(Explanation.match(mu, "mu"));

View File

@ -52,7 +52,7 @@ public class LMJelinekMercerSimilarity extends LMSimilarity {
@Override
protected float score(BasicStats stats, float freq, float docLen) {
return stats.getTotalBoost() *
return stats.getBoost() *
(float)Math.log(1 +
((1 - lambda) * freq / docLen) /
(lambda * ((LMStats)stats).getCollectionProbability()));
@ -61,8 +61,8 @@ public class LMJelinekMercerSimilarity extends LMSimilarity {
@Override
protected void explain(List<Explanation> subs, BasicStats stats, int doc,
float freq, float docLen) {
if (stats.getTotalBoost() != 1.0f) {
subs.add(Explanation.match(stats.getTotalBoost(), "boost"));
if (stats.getBoost() != 1.0f) {
subs.add(Explanation.match(stats.getBoost(), "boost"));
}
subs.add(Explanation.match(lambda, "lambda"));
super.explain(subs, stats, doc, freq, docLen);

View File

@ -54,8 +54,8 @@ public abstract class LMSimilarity extends SimilarityBase {
}
@Override
protected BasicStats newStats(String field, float queryBoost) {
return new LMStats(field, queryBoost);
protected BasicStats newStats(String field) {
return new LMStats(field);
}
/**
@ -108,8 +108,8 @@ public abstract class LMSimilarity extends SimilarityBase {
/**
* Creates LMStats for the provided field and query-time boost
*/
public LMStats(String field, float queryBoost) {
super(field, queryBoost);
public LMStats(String field) {
super(field);
}
/**

View File

@ -50,10 +50,10 @@ public class MultiSimilarity extends Similarity {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
SimWeight subStats[] = new SimWeight[sims.length];
for (int i = 0; i < subStats.length; i++) {
subStats[i] = sims[i].computeWeight(queryBoost, collectionStats, termStats);
subStats[i] = sims[i].computeWeight(collectionStats, termStats);
}
return new MultiStats(subStats);
}
@ -120,9 +120,9 @@ public class MultiSimilarity extends Similarity {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
public void normalize(float queryNorm, float boost) {
for (SimWeight stat : subStats) {
stat.normalize(queryNorm, topLevelBoost);
stat.normalize(queryNorm, boost);
}
}
}

View File

@ -46,10 +46,10 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
}
@Override
public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
PerFieldSimWeight weight = new PerFieldSimWeight();
weight.delegate = get(collectionStats.field());
weight.delegateWeight = weight.delegate.computeWeight(queryBoost, collectionStats, termStats);
weight.delegateWeight = weight.delegate.computeWeight(collectionStats, termStats);
return weight;
}
@ -74,8 +74,8 @@ public abstract class PerFieldSimilarityWrapper extends Similarity {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
delegateWeight.normalize(queryNorm, topLevelBoost);
public void normalize(float queryNorm, float boost) {
delegateWeight.normalize(queryNorm, boost);
}
}
}

View File

@ -78,7 +78,7 @@ import java.util.Collections;
* <a name="querytime">Query time</a>
* At query-time, Queries interact with the Similarity via these steps:
* <ol>
* <li>The {@link #computeWeight(float, CollectionStatistics, TermStatistics...)} method is called a single time,
* <li>The {@link #computeWeight(CollectionStatistics, TermStatistics...)} method is called a single time,
* allowing the implementation to compute any statistics (such as IDF, average document length, etc)
* across <i>the entire collection</i>. The {@link TermStatistics} and {@link CollectionStatistics} passed in
* already contain all of the raw statistics involved, so a Similarity can freely use any combination
@ -158,16 +158,15 @@ public abstract class Similarity {
/**
* Compute any collection-level weight (e.g. IDF, average document length, etc) needed for scoring a query.
*
* @param queryBoost the query-time boost.
* @param collectionStats collection-level statistics, such as the number of tokens in the collection.
* @param termStats term-level statistics, such as the document frequency of a term across the collection.
* @return SimWeight object with the information this Similarity needs to score a query.
*/
public abstract SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats);
public abstract SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats);
/**
* Creates a new {@link Similarity.SimScorer} to score matching documents from a segment of the inverted index.
* @param weight collection information from {@link #computeWeight(float, CollectionStatistics, TermStatistics...)}
* @param weight collection information from {@link #computeWeight(CollectionStatistics, TermStatistics...)}
* @param context segment of the inverted index to be scored.
* @return SloppySimScorer for scoring documents across <code>context</code>
* @throws IOException if there is a low-level I/O error
@ -243,8 +242,11 @@ public abstract class Similarity {
* <p>
* NOTE: a Similarity implementation might not use this normalized value at all,
* it's not required. However, it's usually a good idea to at least incorporate
* the topLevelBoost (e.g. from an outer BooleanQuery) into its score.
* the boost into its score.
* <p>
* NOTE: If this method is called several times, it behaves as if only the
* last call was performed.
*/
public abstract void normalize(float queryNorm, float topLevelBoost);
public abstract void normalize(float queryNorm, float boost);
}
}
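
To illustrate the reshaped contract above: a minimal sketch (not part of this patch; the
helper name and field are invented) of how a caller now obtains a SimWeight and applies a
boost, since computeWeight() no longer takes a queryBoost argument and the boost instead
travels through SimWeight.normalize():

    static Similarity.SimWeight termWeight(IndexSearcher searcher, Term term, float boost) throws IOException {
      Similarity sim = searcher.getSimilarity(true);
      TermContext context = TermContext.build(searcher.getTopReaderContext(), term);
      // collection-level statistics only; no boost parameter any more
      Similarity.SimWeight stats = sim.computeWeight(
          searcher.collectionStatistics(term.field()),
          searcher.termStatistics(term, context));
      // the boost is pushed in through normalize(); only the last call counts
      float queryNorm = sim.queryNorm(stats.getValueForNormalization());
      stats.normalize(queryNorm, boost);
      return stats;
    }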

View File

@ -83,18 +83,18 @@ public abstract class SimilarityBase extends Similarity {
}
@Override
public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
BasicStats stats[] = new BasicStats[termStats.length];
for (int i = 0; i < termStats.length; i++) {
stats[i] = newStats(collectionStats.field(), queryBoost);
stats[i] = newStats(collectionStats.field());
fillBasicStats(stats[i], collectionStats, termStats[i]);
}
return stats.length == 1 ? stats[0] : new MultiSimilarity.MultiStats(stats);
}
/** Factory method to return a custom stats object */
protected BasicStats newStats(String field, float queryBoost) {
return new BasicStats(field, queryBoost);
protected BasicStats newStats(String field) {
return new BasicStats(field);
}
/** Fills all member fields defined in {@code BasicStats} in {@code stats}.

View File

@ -414,7 +414,7 @@ import org.apache.lucene.util.BytesRef;
* <tr>
* <td valign="middle" align="right" rowspan="1">
* {@link org.apache.lucene.search.Weight#getValueForNormalization() sumOfSquaredWeights} &nbsp; = &nbsp;
* {@link org.apache.lucene.search.Query#getBoost() q.getBoost()} <sup><big>2</big></sup>
* {@link org.apache.lucene.search.BoostQuery#getBoost() q.getBoost()} <sup><big>2</big></sup>
* &nbsp;&middot;&nbsp;
* </td>
* <td valign="bottom" align="center" rowspan="1" style="text-align: center">
@ -443,13 +443,13 @@ import org.apache.lucene.util.BytesRef;
* is a search time boost of term <i>t</i> in the query <i>q</i> as
* specified in the query text
* (see <A HREF="{@docRoot}/../queryparser/org/apache/lucene/queryparser/classic/package-summary.html#Boosting_a_Term">query syntax</A>),
* or as set by application calls to
* {@link org.apache.lucene.search.Query#setBoost(float) setBoost()}.
* or as set by wrapping with
* {@link org.apache.lucene.search.BoostQuery#BoostQuery(org.apache.lucene.search.Query, float) BoostQuery}.
* Notice that there is really no direct API for accessing a boost of one term in a multi term query,
* but rather multi terms are represented in a query as multi
* {@link org.apache.lucene.search.TermQuery TermQuery} objects,
* and so the boost of a term in the query is accessible by calling the sub-query
* {@link org.apache.lucene.search.Query#getBoost() getBoost()}.
* {@link org.apache.lucene.search.BoostQuery#getBoost() getBoost()}.
* <br>&nbsp;<br>
* </li>
*
@ -684,11 +684,11 @@ public abstract class TFIDFSimilarity extends Similarity {
public abstract float scorePayload(int doc, int start, int end, BytesRef payload);
@Override
public final SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public final SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
final Explanation idf = termStats.length == 1
? idfExplain(collectionStats, termStats[0])
: idfExplain(collectionStats, termStats);
return new IDFStats(collectionStats.field(), idf, queryBoost);
return new IDFStats(collectionStats.field(), idf);
}
@Override
@ -738,16 +738,15 @@ public abstract class TFIDFSimilarity extends Similarity {
/** The idf and its explanation */
private final Explanation idf;
private float queryNorm;
private float boost;
private float queryWeight;
private final float queryBoost;
private float value;
public IDFStats(String field, Explanation idf, float queryBoost) {
public IDFStats(String field, Explanation idf) {
// TODO: Validate?
this.field = field;
this.idf = idf;
this.queryBoost = queryBoost;
this.queryWeight = idf.getValue() * queryBoost; // compute query weight
normalize(1f, 1f);
}
@Override
@ -757,9 +756,10 @@ public abstract class TFIDFSimilarity extends Similarity {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
this.queryNorm = queryNorm * topLevelBoost;
queryWeight *= this.queryNorm; // normalize query weight
public void normalize(float queryNorm, float boost) {
this.boost = boost;
this.queryNorm = queryNorm;
queryWeight = queryNorm * boost * idf.getValue();
value = queryWeight * idf.getValue(); // idf for document
}
}
@ -767,8 +767,8 @@ public abstract class TFIDFSimilarity extends Similarity {
private Explanation explainQuery(IDFStats stats) {
List<Explanation> subs = new ArrayList<>();
Explanation boostExpl = Explanation.match(stats.queryBoost, "boost");
if (stats.queryBoost != 1.0f)
Explanation boostExpl = Explanation.match(stats.boost, "boost");
if (stats.boost != 1.0f)
subs.add(boostExpl);
subs.add(stats.idf);
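
Worked through with arbitrary numbers, the rewritten IDFStats.normalize() above produces the
same overall factor as the old constructor-plus-normalize path. With queryNorm = 0.5,
boost = 2 and idf = 3:

    queryWeight = queryNorm * boost * idf = 0.5 * 2 * 3 = 3
    value       = queryWeight * idf       = 3 * 3       = 9

so the document-side multiplier is still queryNorm * boost * idf^2; the query boost that used
to be pre-multiplied into queryWeight at construction time (with topLevelBoost folded into
queryNorm) now arrives solely as the boost argument of normalize().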

View File

@ -20,7 +20,6 @@ package org.apache.lucene.search.spans;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
import java.util.Objects;
@ -68,9 +67,9 @@ import java.util.Objects;
* but with the term statistics of the real field. This may lead to exceptions,
* poor performance, and unexpected scoring behaviour.
*/
public class FieldMaskingSpanQuery extends SpanQuery {
private SpanQuery maskedQuery;
private String field;
public final class FieldMaskingSpanQuery extends SpanQuery {
private final SpanQuery maskedQuery;
private final String field;
public FieldMaskingSpanQuery(SpanQuery maskedQuery, String maskedField) {
this.maskedQuery = Objects.requireNonNull(maskedQuery);
@ -100,15 +99,10 @@ public class FieldMaskingSpanQuery extends SpanQuery {
SpanQuery rewritten = (SpanQuery) maskedQuery.rewrite(reader);
if (rewritten != maskedQuery) {
clone = (FieldMaskingSpanQuery) this.clone();
clone.maskedQuery = rewritten;
return new FieldMaskingSpanQuery(rewritten, field);
}
if (clone != null) {
return clone;
} else {
return this;
}
return super.rewrite(reader);
}
@Override
@ -117,7 +111,6 @@ public class FieldMaskingSpanQuery extends SpanQuery {
buffer.append("mask(");
buffer.append(maskedQuery.toString(field));
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
buffer.append(" as ");
buffer.append(this.field);
return buffer.toString();
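
The rewrite() above follows the pattern applied throughout the span package by this change:
instead of cloning and mutating, rewrite() builds a fresh immutable instance when a nested
query rewrote, and otherwise defers to super.rewrite(reader), which returns this. A rough
sketch of the idiom for a hypothetical single-child wrapper (WrapperSpanQuery and
getWrapped() are made-up names, not Lucene API):

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
      SpanQuery inner = (SpanQuery) getWrapped().rewrite(reader);
      if (inner != getWrapped()) {
        return new WrapperSpanQuery(inner);   // rebuild instead of clone-and-set
      }
      return super.rewrite(reader);           // unchanged: the base class returns this
    }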

View File

@ -0,0 +1,189 @@
package org.apache.lucene.search.spans;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
/**
* Counterpart of {@link BoostQuery} for spans.
*/
public final class SpanBoostQuery extends SpanQuery {
/** By default we enclose the wrapped query within parentheses, but this is
* not required for all queries, so we use a whitelist of queries that don't
* need parentheses to have a better toString(). */
private static final Set<Class<? extends SpanQuery>> NO_PARENS_REQUIRED_QUERIES = Collections.unmodifiableSet(
new HashSet<>(Arrays.asList(
SpanTermQuery.class,
SpanNearQuery.class,
SpanOrQuery.class,
SpanFirstQuery.class,
SpanContainingQuery.class,
SpanContainQuery.class,
SpanNotQuery.class,
SpanWithinQuery.class
)));
private final SpanQuery query;
private final float boost;
/** Sole constructor: wrap {@code query} in such a way that the produced
* scores will be boosted by {@code boost}. */
public SpanBoostQuery(SpanQuery query, float boost) {
this.query = Objects.requireNonNull(query);
this.boost = boost;
}
/**
* Return the wrapped {@link SpanQuery}.
*/
public SpanQuery getQuery() {
return query;
}
/**
* Return the applied boost.
*/
public float getBoost() {
return boost;
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) {
return false;
}
SpanBoostQuery that = (SpanBoostQuery) obj;
return query.equals(that.query)
&& Float.floatToIntBits(boost) == Float.floatToIntBits(that.boost);
}
@Override
public int hashCode() {
int h = super.hashCode();
h = 31 * h + query.hashCode();
h = 31 * h + Float.floatToIntBits(boost);
return h;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
if (boost == 1f) {
return query;
}
final SpanQuery rewritten = (SpanQuery) query.rewrite(reader);
if (query != rewritten) {
return new SpanBoostQuery(rewritten, boost);
}
if (query.getClass() == SpanBoostQuery.class) {
SpanBoostQuery in = (SpanBoostQuery) query;
return new SpanBoostQuery(in.query, boost * in.boost);
}
return super.rewrite(reader);
}
@Override
public String toString(String field) {
boolean needsParens = NO_PARENS_REQUIRED_QUERIES.contains(query.getClass()) == false;
StringBuilder builder = new StringBuilder();
if (needsParens) {
builder.append("(");
}
builder.append(query.toString(field));
if (needsParens) {
builder.append(")");
}
builder.append("^");
builder.append(boost);
return builder.toString();
}
@Override
public String getField() {
return query.getField();
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
final SpanWeight weight = query.createWeight(searcher, needsScores);
if (needsScores == false) {
return weight;
}
Map<Term, TermContext> terms = new TreeMap<>();
weight.extractTermContexts(terms);
weight.normalize(1f, boost);
return new SpanWeight(this, searcher, terms) {
@Override
public void extractTerms(Set<Term> terms) {
weight.extractTerms(terms);
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return weight.explain(context, doc);
}
@Override
public float getValueForNormalization() throws IOException {
return weight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
weight.normalize(norm, SpanBoostQuery.this.boost * boost);
}
@Override
public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) throws IOException {
return weight.getSpans(ctx, requiredPostings);
}
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
return weight.scorer(context);
}
@Override
public void extractTermContexts(Map<Term,TermContext> contexts) {
weight.extractTermContexts(contexts);
}
};
}
}
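
A usage sketch for the new class (field and terms invented for illustration): a span query is
boosted by wrapping it, the counterpart of wrapping a plain query, and nested wrappers collapse
into a single multiplied boost at rewrite time.

    SpanQuery near = new SpanNearQuery(new SpanQuery[] {
        new SpanTermQuery(new Term("body", "apache")),
        new SpanTermQuery(new Term("body", "lucene"))
      }, 2, true);
    SpanQuery boosted = new SpanBoostQuery(near, 3.0f);  // produced scores multiplied by 3
    // boosted.getField() still reports "body"; SpanNearQuery is whitelisted, so
    // toString() prints it without extra parentheses, e.g. spanNear(...)^3.0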

View File

@ -35,7 +35,7 @@ abstract class SpanContainQuery extends SpanQuery implements Cloneable {
SpanQuery big;
SpanQuery little;
SpanContainQuery(SpanQuery big, SpanQuery little, float boost) {
SpanContainQuery(SpanQuery big, SpanQuery little) {
this.big = Objects.requireNonNull(big);
this.little = Objects.requireNonNull(little);
Objects.requireNonNull(big.getField());
@ -43,7 +43,6 @@ abstract class SpanContainQuery extends SpanQuery implements Cloneable {
if (! big.getField().equals(little.getField())) {
throw new IllegalArgumentException("big and little not same field");
}
this.setBoost(boost);
}
@Override
@ -104,23 +103,21 @@ abstract class SpanContainQuery extends SpanQuery implements Cloneable {
return buffer.toString();
}
@Override
public abstract SpanContainQuery clone();
@Override
public Query rewrite(IndexReader reader) throws IOException {
SpanContainQuery clone = null;
SpanQuery rewrittenBig = (SpanQuery) big.rewrite(reader);
if (rewrittenBig != big) {
clone = this.clone();
clone.big = rewrittenBig;
}
SpanQuery rewrittenLittle = (SpanQuery) little.rewrite(reader);
if (rewrittenLittle != little) {
if (clone == null) clone = this.clone();
clone.little = rewrittenLittle;
if (big != rewrittenBig || little != rewrittenLittle) {
try {
SpanContainQuery clone = (SpanContainQuery) super.clone();
clone.big = rewrittenBig;
clone.little = rewrittenLittle;
return clone;
} catch (CloneNotSupportedException e) {
throw new AssertionError(e);
}
}
return (clone != null) ? clone : this;
return super.rewrite(reader);
}
@Override

View File

@ -21,21 +21,20 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.Bits;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
/** Keep matches that contain another Spans. */
public class SpanContainingQuery extends SpanContainQuery {
public final class SpanContainingQuery extends SpanContainQuery {
/** Construct a SpanContainingQuery matching spans from <code>big</code>
* that contain at least one span from <code>little</code>.
* This query has the boost of <code>big</code>.
* <code>big</code> and <code>little</code> must be in the same field.
*/
public SpanContainingQuery(SpanQuery big, SpanQuery little) {
super(big, little, big.getBoost());
super(big, little);
}
@Override
@ -43,13 +42,6 @@ public class SpanContainingQuery extends SpanContainQuery {
return toString(field, "SpanContaining");
}
@Override
public SpanContainingQuery clone() {
return new SpanContainingQuery(
(SpanQuery) big.clone(),
(SpanQuery) little.clone());
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
SpanWeight bigWeight = big.createWeight(searcher, false);

View File

@ -18,7 +18,6 @@ package org.apache.lucene.search.spans;
*/
import org.apache.lucene.search.spans.FilterSpans.AcceptStatus;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
@ -54,15 +53,7 @@ public class SpanFirstQuery extends SpanPositionRangeQuery {
buffer.append(", ");
buffer.append(end);
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
@Override
public SpanFirstQuery clone() {
SpanFirstQuery spanFirstQuery = new SpanFirstQuery((SpanQuery) match.clone(), end);
spanFirstQuery.setBoost(getBoost());
return spanFirstQuery;
}
}

View File

@ -114,10 +114,6 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
String queryStr = query.toString(field);
builder.append(queryStr);
builder.append(")");
if (getBoost() != 1F) {
builder.append('^');
builder.append(getBoost());
}
return builder.toString();
}
@ -126,7 +122,6 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
final Query q = query.rewrite(reader);
if (!(q instanceof SpanQuery))
throw new UnsupportedOperationException("You can only use SpanMultiTermQueryWrapper with a suitable SpanRewriteMethod.");
q.setBoost(q.getBoost() * getBoost()); // multiply boost
return q;
}
@ -179,7 +174,6 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
@Override
protected void addClause(SpanOrQuery topLevel, Term term, int docCount, float boost, TermContext states) {
final SpanTermQuery q = new SpanTermQuery(term, states);
q.setBoost(boost);
topLevel.addClause(q);
}
};
@ -228,7 +222,6 @@ public class SpanMultiTermQueryWrapper<Q extends MultiTermQuery> extends SpanQue
@Override
protected void addClause(SpanOrQuery topLevel, Term term, int docFreq, float boost, TermContext states) {
final SpanTermQuery q = new SpanTermQuery(term, states);
q.setBoost(boost);
topLevel.addClause(q);
}
};

View File

@ -173,7 +173,6 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
buffer.append(", ");
buffer.append(inOrder);
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
@ -235,34 +234,24 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
@Override
public Query rewrite(IndexReader reader) throws IOException {
SpanNearQuery clone = null;
boolean actuallyRewritten = false;
List<SpanQuery> rewrittenClauses = new ArrayList<>();
for (int i = 0 ; i < clauses.size(); i++) {
SpanQuery c = clauses.get(i);
SpanQuery query = (SpanQuery) c.rewrite(reader);
if (query != c) { // clause rewrote: must clone
if (clone == null)
clone = this.clone();
clone.clauses.set(i,query);
actuallyRewritten |= query != c;
rewrittenClauses.add(query);
}
if (actuallyRewritten) {
try {
SpanNearQuery rewritten = (SpanNearQuery) clone();
rewritten.clauses = rewrittenClauses;
return rewritten;
} catch (CloneNotSupportedException e) {
throw new AssertionError(e);
}
}
if (clone != null) {
return clone; // some clauses rewrote
} else {
return this; // no clauses rewrote
}
}
@Override
public SpanNearQuery clone() {
int sz = clauses.size();
SpanQuery[] newClauses = new SpanQuery[sz];
for (int i = 0; i < sz; i++) {
newClauses[i] = (SpanQuery) clauses.get(i).clone();
}
SpanNearQuery spanNearQuery = new SpanNearQuery(newClauses, slop, inOrder);
spanNearQuery.setBoost(getBoost());
return spanNearQuery;
return super.rewrite(reader);
}
/** Returns true iff <code>o</code> is equal to this. */

View File

@ -35,7 +35,7 @@ import java.util.Set;
/** Removes matches which overlap with another SpanQuery or which are
* within x tokens before or y tokens after another SpanQuery.
*/
public class SpanNotQuery extends SpanQuery implements Cloneable {
public final class SpanNotQuery extends SpanQuery {
private SpanQuery include;
private SpanQuery exclude;
private final int pre;
@ -89,17 +89,9 @@ public class SpanNotQuery extends SpanQuery implements Cloneable {
buffer.append(", ");
buffer.append(Integer.toString(post));
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
@Override
public SpanNotQuery clone() {
SpanNotQuery spanNotQuery = new SpanNotQuery((SpanQuery) include.clone(),
(SpanQuery) exclude.clone(), pre, post);
spanNotQuery.setBoost(getBoost());
return spanNotQuery;
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
@ -200,24 +192,12 @@ public class SpanNotQuery extends SpanQuery implements Cloneable {
@Override
public Query rewrite(IndexReader reader) throws IOException {
SpanNotQuery clone = null;
SpanQuery rewrittenInclude = (SpanQuery) include.rewrite(reader);
if (rewrittenInclude != include) {
clone = this.clone();
clone.include = rewrittenInclude;
}
SpanQuery rewrittenExclude = (SpanQuery) exclude.rewrite(reader);
if (rewrittenExclude != exclude) {
if (clone == null) clone = this.clone();
clone.exclude = rewrittenExclude;
}
if (clone != null) {
return clone; // some clauses rewrote
} else {
return this; // no clauses rewrote
if (rewrittenInclude != include || rewrittenExclude != exclude) {
return new SpanNotQuery(rewrittenInclude, rewrittenExclude);
}
return super.rewrite(reader);
}
/** Returns true iff <code>o</code> is equal to this. */

View File

@ -27,7 +27,6 @@ import org.apache.lucene.search.DisjunctionDISIApproximation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
import java.util.ArrayList;
@ -39,7 +38,7 @@ import java.util.Set;
/** Matches the union of its clauses.
*/
public class SpanOrQuery extends SpanQuery implements Cloneable {
public final class SpanOrQuery extends SpanQuery {
private List<SpanQuery> clauses;
private String field;
@ -71,36 +70,20 @@ public class SpanOrQuery extends SpanQuery implements Cloneable {
@Override
public String getField() { return field; }
@Override
public SpanOrQuery clone() {
int sz = clauses.size();
SpanQuery[] newClauses = new SpanQuery[sz];
for (int i = 0; i < sz; i++) {
newClauses[i] = (SpanQuery) clauses.get(i).clone();
}
SpanOrQuery soq = new SpanOrQuery(newClauses);
soq.setBoost(getBoost());
return soq;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
SpanOrQuery clone = null;
SpanOrQuery rewritten = new SpanOrQuery();
boolean actuallyRewritten = false;
for (int i = 0 ; i < clauses.size(); i++) {
SpanQuery c = clauses.get(i);
SpanQuery query = (SpanQuery) c.rewrite(reader);
if (query != c) { // clause rewrote: must clone
if (clone == null)
clone = this.clone();
clone.clauses.set(i,query);
}
actuallyRewritten |= query != c;
rewritten.addClause(query);
}
if (clone != null) {
return clone; // some clauses rewrote
} else {
return this; // no clauses rewrote
if (actuallyRewritten) {
return rewritten;
}
return super.rewrite(reader);
}
@Override
@ -116,7 +99,6 @@ public class SpanOrQuery extends SpanQuery implements Cloneable {
}
}
buffer.append("])");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -106,19 +106,18 @@ public abstract class SpanPositionCheckQuery extends SpanQuery implements Clonea
@Override
public Query rewrite(IndexReader reader) throws IOException {
SpanPositionCheckQuery clone = null;
SpanQuery rewritten = (SpanQuery) match.rewrite(reader);
if (rewritten != match) {
clone = (SpanPositionCheckQuery) this.clone();
clone.match = rewritten;
try {
SpanPositionCheckQuery clone = (SpanPositionCheckQuery) this.clone();
clone.match = rewritten;
return clone;
} catch (CloneNotSupportedException e) {
throw new AssertionError(e);
}
}
if (clone != null) {
return clone; // some clauses rewrote
} else {
return this; // no clauses rewrote
}
return super.rewrite(reader);
}
/** Returns true iff <code>o</code> is equal to this. */

View File

@ -18,7 +18,6 @@ package org.apache.lucene.search.spans;
import org.apache.lucene.search.spans.FilterSpans.AcceptStatus;
import org.apache.lucene.util.ToStringUtils;
import java.io.IOException;
@ -70,17 +69,9 @@ public class SpanPositionRangeQuery extends SpanPositionCheckQuery {
buffer.append(", ").append(start).append(", ");
buffer.append(end);
buffer.append(")");
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}
@Override
public SpanPositionRangeQuery clone() {
SpanPositionRangeQuery result = new SpanPositionRangeQuery((SpanQuery) match.clone(), start, end);
result.setBoost(getBoost());
return result;
}
@Override
public boolean equals(Object o) {
if (! super.equals(o)) {

View File

@ -129,7 +129,6 @@ public class SpanTermQuery extends SpanQuery {
buffer.append(term.text());
else
buffer.append(term.toString());
buffer.append(ToStringUtils.boost(getBoost()));
return buffer.toString();
}

View File

@ -101,7 +101,7 @@ public abstract class SpanWeight extends Weight {
i++;
}
CollectionStatistics collectionStats = searcher.collectionStatistics(query.getField());
return searcher.getSimilarity(true).computeWeight(query.getBoost(), collectionStats, termStats);
return searcher.getSimilarity(true).computeWeight(collectionStats, termStats);
}
/**
@ -124,9 +124,9 @@ public abstract class SpanWeight extends Weight {
}
@Override
public void normalize(float queryNorm, float topLevelBoost) {
public void normalize(float queryNorm, float boost) {
if (simWeight != null) {
simWeight.normalize(queryNorm, topLevelBoost);
simWeight.normalize(queryNorm, boost);
}
}

View File

@ -27,7 +27,7 @@ import java.util.ArrayList;
import java.util.Map;
/** Keep matches that are contained within another Spans. */
public class SpanWithinQuery extends SpanContainQuery {
public final class SpanWithinQuery extends SpanContainQuery {
/** Construct a SpanWithinQuery matching spans from <code>little</code>
* that are inside of <code>big</code>.
@ -35,7 +35,7 @@ public class SpanWithinQuery extends SpanContainQuery {
* <code>big</code> and <code>little</code> must be in the same field.
*/
public SpanWithinQuery(SpanQuery big, SpanQuery little) {
super(big, little, little.getBoost());
super(big, little);
}
@Override
@ -43,13 +43,6 @@ public class SpanWithinQuery extends SpanContainQuery {
return toString(field, "SpanWithin");
}
@Override
public SpanWithinQuery clone() {
return new SpanWithinQuery(
(SpanQuery) big.clone(),
(SpanQuery) little.clone());
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
SpanWeight bigWeight = big.createWeight(searcher, false);

View File

@ -24,15 +24,6 @@ public final class ToStringUtils {
private ToStringUtils() {} // no instance
/**
* for printing boost only if not 1.0
*/
public static String boost(float boost) {
if (boost != 1.0f) {
return "^" + Float.toString(boost);
} else return "";
}
public static void byteArray(StringBuilder buffer, byte[] bytes) {
for (int i = 0; i < bytes.length; i++) {
buffer.append("b[").append(i).append("]=").append(bytes[i]);

View File

@ -34,9 +34,8 @@ import org.apache.lucene.search.*;
public class TestSearch extends LuceneTestCase {
public void testNegativeQueryBoost() throws Exception {
Query q = new TermQuery(new Term("foo", "bar"));
q.setBoost(-42f);
assertEquals(-42f, q.getBoost(), 0.0f);
BoostQuery q = new BoostQuery(new TermQuery(new Term("foo", "bar")), -42f);
assertEquals(-42f, q.getBoost(), 0f);
Directory directory = newDirectory();
try {

View File

@ -110,7 +110,7 @@ public class TestCustomNorms extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
throw new UnsupportedOperationException();
}

View File

@ -187,7 +187,7 @@ public class TestNorms extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
throw new UnsupportedOperationException();
}

View File

@ -105,7 +105,7 @@ public class TestUniqueTermCount extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
throw new UnsupportedOperationException();
}

View File

@ -224,7 +224,7 @@ final class JustCompileSearch {
static final class JustCompileSimilarity extends Similarity {
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}

View File

@ -84,7 +84,7 @@ public class TestBooleanCoord extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
return new SimWeight() {
@Override
public float getValueForNormalization() {

View File

@ -102,8 +102,7 @@ public class TestBooleanQuery extends LuceneTestCase {
// LUCENE-2617: make sure that a term not in the index still contributes to the score via coord factor
float score = s.search(q.build(), 10).getMaxScore();
Query subQuery = new TermQuery(new Term("field", "not_in_index"));
subQuery.setBoost(0);
Query subQuery = new BoostQuery(new TermQuery(new Term("field", "not_in_index")), 0f);
q.add(subQuery, BooleanClause.Occur.SHOULD);
float score2 = s.search(q.build(), 10).getMaxScore();
assertEquals(score*.5F, score2, 1e-6);
@ -114,14 +113,12 @@ public class TestBooleanQuery extends LuceneTestCase {
qq.add(clause);
}
PhraseQuery phrase = new PhraseQuery("field", "not_in_index", "another_not_in_index");
phrase.setBoost(0);
qq.add(phrase, BooleanClause.Occur.SHOULD);
qq.add(new BoostQuery(phrase, 0f), BooleanClause.Occur.SHOULD);
score2 = s.search(qq.build(), 10).getMaxScore();
assertEquals(score*(1/3F), score2, 1e-6);
// now test BooleanScorer2
subQuery = new TermQuery(new Term("field", "b"));
subQuery.setBoost(0);
subQuery = new BoostQuery(new TermQuery(new Term("field", "b")), 0f);
q.add(subQuery, BooleanClause.Occur.MUST);
score2 = s.search(q.build(), 10).getMaxScore();
assertEquals(score*(2/3F), score2, 1e-6);
@ -334,7 +331,6 @@ public class TestBooleanQuery extends LuceneTestCase {
}
public void testOneClauseRewriteOptimization() throws Exception {
final float BOOST = 3.5F;
final String FIELD = "content";
final String VALUE = "foo";
@ -343,29 +339,20 @@ public class TestBooleanQuery extends LuceneTestCase {
IndexReader r = DirectoryReader.open(dir);
TermQuery expected = new TermQuery(new Term(FIELD, VALUE));
expected.setBoost(BOOST);
final int numLayers = atLeast(3);
boolean needBoost = true;
Query actual = new TermQuery(new Term(FIELD, VALUE));
for (int i = 0; i < numLayers; i++) {
if (needBoost && 0 == TestUtil.nextInt(random(),0,numLayers)) {
needBoost = false;
actual.setBoost(BOOST);
}
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(actual, random().nextBoolean()
? BooleanClause.Occur.SHOULD : BooleanClause.Occur.MUST);
actual = bq.build();
}
if (needBoost) {
actual.setBoost(BOOST);
}
assertEquals(numLayers + ": " + actual.toString(),
expected, actual.rewrite(r));
expected, new IndexSearcher(r).rewrite(actual));
r.close();
dir.close();
@ -462,7 +449,6 @@ public class TestBooleanQuery extends LuceneTestCase {
}
bq2Builder.setMinimumNumberShouldMatch(bq.getMinimumNumberShouldMatch());
BooleanQuery bq2 = bq2Builder.build();
bq2.setBoost(bq.getBoost());
final AtomicBoolean matched = new AtomicBoolean();
searcher.search(bq, new SimpleCollector() {
@ -515,7 +501,6 @@ public class TestBooleanQuery extends LuceneTestCase {
BooleanQuery.Builder qBuilder = new BooleanQuery.Builder();
BooleanQuery q = qBuilder.build();
q.setBoost(random().nextFloat());
qBuilder.add(new TermQuery(new Term("field", "a")), Occur.FILTER);
// With a single clause, we will rewrite to the underlying
@ -526,7 +511,6 @@ public class TestBooleanQuery extends LuceneTestCase {
// Make sure it returns null scores
qBuilder.add(new TermQuery(new Term("field", "b")), Occur.FILTER);
q = qBuilder.build();
q.setBoost(random().nextFloat());
assertSameScoresWithoutFilters(searcher, q);
// Now with a scoring clause, we need to make sure that
@ -534,7 +518,6 @@ public class TestBooleanQuery extends LuceneTestCase {
// query
qBuilder.add(new TermQuery(new Term("field", "c")), Occur.SHOULD);
q = qBuilder.build();
q.setBoost(random().nextFloat());
assertSameScoresWithoutFilters(searcher, q);
// FILTER and empty SHOULD
@ -542,7 +525,6 @@ public class TestBooleanQuery extends LuceneTestCase {
qBuilder.add(new TermQuery(new Term("field", "a")), Occur.FILTER);
qBuilder.add(new TermQuery(new Term("field", "e")), Occur.SHOULD);
q = qBuilder.build();
q.setBoost(random().nextFloat());
assertSameScoresWithoutFilters(searcher, q);
// mix of FILTER and MUST
@ -550,7 +532,6 @@ public class TestBooleanQuery extends LuceneTestCase {
qBuilder.add(new TermQuery(new Term("field", "a")), Occur.FILTER);
qBuilder.add(new TermQuery(new Term("field", "d")), Occur.MUST);
q = qBuilder.build();
q.setBoost(random().nextFloat());
assertSameScoresWithoutFilters(searcher, q);
// FILTER + minShouldMatch
@ -560,7 +541,6 @@ public class TestBooleanQuery extends LuceneTestCase {
qBuilder.add(new TermQuery(new Term("field", "d")), Occur.SHOULD);
qBuilder.setMinimumNumberShouldMatch(1);
q = qBuilder.build();
q.setBoost(random().nextFloat());
assertSameScoresWithoutFilters(searcher, q);
reader.close();
@ -585,8 +565,8 @@ public class TestBooleanQuery extends LuceneTestCase {
// Single clauses rewrite to a term query
final Query rewritten1 = query1.build().rewrite(reader);
assertTrue(rewritten1 instanceof ConstantScoreQuery);
assertEquals(0f, rewritten1.getBoost(), 0f);
assertTrue(rewritten1 instanceof BoostQuery);
assertEquals(0f, ((BoostQuery) rewritten1).getBoost(), 0f);
// When there are two clauses, we cannot rewrite, but if one of them creates
// a null scorer we will end up with a single filter scorer and will need to

View File

@ -0,0 +1,70 @@
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.util.LuceneTestCase;
public class TestBoostQuery extends LuceneTestCase {
public void testEquals() {
final float boost = random().nextFloat() * 3 - 1;
BoostQuery q1 = new BoostQuery(new MatchAllDocsQuery(), boost);
BoostQuery q2 = new BoostQuery(new MatchAllDocsQuery(), boost);
assertEquals(q1, q2);
assertEquals(q1.getBoost(), q2.getBoost(), 0f);
float boost2 = boost;
while (boost == boost2) {
boost2 = random().nextFloat() * 3 - 1;
}
BoostQuery q3 = new BoostQuery(new MatchAllDocsQuery(), boost2);
assertFalse(q1.equals(q3));
assertFalse(q1.hashCode() == q3.hashCode());
}
public void testToString() {
assertEquals("foo:bar^2.0", new BoostQuery(new TermQuery(new Term("foo", "bar")), 2).toString());
BooleanQuery bq = new BooleanQuery.Builder()
.add(new TermQuery(new Term("foo", "bar")), Occur.SHOULD)
.add(new TermQuery(new Term("foo", "baz")), Occur.SHOULD)
.build();
assertEquals("(foo:bar foo:baz)^2.0", new BoostQuery(bq, 2).toString());
}
public void testRewrite() throws IOException {
IndexSearcher searcher = new IndexSearcher(new MultiReader());
// inner queries are rewritten
Query q = new BoostQuery(new MatchNoDocsQuery(), 2);
assertEquals(new BoostQuery(new BooleanQuery.Builder().build(), 2), searcher.rewrite(q));
// boosts are merged
q = new BoostQuery(new BoostQuery(new MatchAllDocsQuery(), 3), 2);
assertEquals(new BoostQuery(new MatchAllDocsQuery(), 6), searcher.rewrite(q));
// scores are not computed when the boost is 0
q = new BoostQuery(new MatchAllDocsQuery(), 0);
assertEquals(new BoostQuery(new ConstantScoreQuery(new MatchAllDocsQuery()), 0), searcher.rewrite(q));
}
}

View File

@ -73,12 +73,10 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
.add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST)
.add(matchTheseItems(new int[] {1,3}), Occur.FILTER)
.build();
t.setBoost(1000);
q.add(t, Occur.SHOULD);
q.add(new BoostQuery(t, 1000), Occur.SHOULD);
t = new ConstantScoreQuery(matchTheseItems(new int[] {0,2}));
t.setBoost(30);
q.add(t, Occur.SHOULD);
q.add(new BoostQuery(t, 30), Occur.SHOULD);
DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
dm.add(snear(st("w2"),
@ -135,12 +133,10 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
.add(new TermQuery(new Term(FIELD, "xx")), Occur.MUST)
.add(matchTheseItems(new int[] {1,3}), Occur.FILTER)
.build();
t.setBoost(1000);
q.add(t, Occur.SHOULD);
q.add(new BoostQuery(t, 1000), Occur.SHOULD);
t = new ConstantScoreQuery(matchTheseItems(new int[] {0,2}));
t.setBoost(-20.0f);
q.add(t, Occur.SHOULD);
q.add(new BoostQuery(t, -20), Occur.SHOULD);
DisjunctionMaxQuery dm = new DisjunctionMaxQuery(0.2f);
dm.add(snear(st("w2"),
@ -175,9 +171,8 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
builder.add(snear("w2","w3",1,true), Occur.SHOULD);
builder.add(snear("w1","w3",3,true), Occur.SHOULD);
BooleanQuery b = builder.build();
b.setBoost(0.0f);
q.add(b, Occur.SHOULD);
q.add(new BoostQuery(b, 0), Occur.SHOULD);
qtest(q.build(), new int[] { 0,1,2 });
}
@ -193,21 +188,18 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
public void testT3() throws Exception {
TermQuery query = new TermQuery(new Term(FIELD, "w1"));
query.setBoost(0);
bqtest(query, new int[] { 0,1,2,3 });
bqtest(new BoostQuery(query, 0), new int[] { 0,1,2,3 });
}
public void testMA3() throws Exception {
Query q=new MatchAllDocsQuery();
q.setBoost(0);
bqtest(q, new int[] { 0,1,2,3 });
bqtest(new BoostQuery(q, 0), new int[] { 0,1,2,3 });
}
public void testFQ5() throws Exception {
TermQuery query = new TermQuery(new Term(FIELD, "xx"));
query.setBoost(0);
Query filtered = new BooleanQuery.Builder()
.add(query, Occur.MUST)
.add(new BoostQuery(query, 0), Occur.MUST)
.add(matchTheseItems(new int[] {1,3}), Occur.FILTER)
.build();
bqtest(filtered, new int[] {3});
@ -215,8 +207,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
public void testCSQ4() throws Exception {
Query q = new ConstantScoreQuery(matchTheseItems(new int[] {3}));
q.setBoost(0);
bqtest(q, new int[] {3});
bqtest(new BoostQuery(q, 0), new int[] {3});
}
public void testDMQ10() throws Exception {
@ -225,17 +216,14 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term(FIELD, "yy")), Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
boostedQuery.setBoost(100);
query.add(boostedQuery, Occur.SHOULD);
query.add(new BoostQuery(boostedQuery, 100), Occur.SHOULD);
q.add(query.build());
TermQuery xxBoostedQuery = new TermQuery(new Term(FIELD, "xx"));
xxBoostedQuery.setBoost(0);
q.add(xxBoostedQuery);
q.setBoost(0.0f);
bqtest(q, new int[] { 0,2,3 });
q.add(new BoostQuery(xxBoostedQuery, 0));
bqtest(new BoostQuery(q, 0), new int[] { 0,2,3 });
}
public void testMPQ7() throws Exception {
@ -243,8 +231,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
q.add(ta(new String[] {"w1"}));
q.add(ta(new String[] {"w2"}));
q.setSlop(1);
q.setBoost(0.0f);
bqtest(q, new int[] { 0,1,2 });
bqtest(new BoostQuery(q, 0), new int[] { 0,1,2 });
}
public void testBQ12() throws Exception {
@ -252,8 +239,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w2"));
boostedQuery.setBoost(0);
query.add(boostedQuery, Occur.SHOULD);
query.add(new BoostQuery(boostedQuery, 0), Occur.SHOULD);
qtest(query.build(), new int[] { 0,1,2,3 });
}
@ -262,8 +248,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term(FIELD, "w1")), Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
boostedQuery.setBoost(0);
query.add(boostedQuery, Occur.MUST_NOT);
query.add(new BoostQuery(boostedQuery, 0), Occur.MUST_NOT);
qtest(query.build(), new int[] { 1,2,3 });
}
@ -271,8 +256,7 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
// NOTE: using qtest not bqtest
BooleanQuery.Builder query = new BooleanQuery.Builder();
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
boostedQuery.setBoost(0);
query.add(boostedQuery, Occur.MUST);
query.add(new BoostQuery(boostedQuery, 0), Occur.MUST);
query.add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
qtest(query.build(), new int[] { 0,1,2,3 });
@ -283,66 +267,57 @@ public class TestComplexExplanations extends BaseExplanationTestCase {
builder.add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
Query query = builder.build();
query.setBoost(0);
bqtest(query, new int[] { 0,1,2,3 });
bqtest(new BoostQuery(query, 0), new int[] { 0,1,2,3 });
}
public void testBQ22() throws Exception {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
boostedQuery.setBoost(0);
builder.add(boostedQuery, Occur.MUST);
builder.add(new BoostQuery(boostedQuery, 0), Occur.MUST);
builder.add(new TermQuery(new Term(FIELD, "w2")), Occur.SHOULD);
BooleanQuery query = builder.build();
query.setBoost(0);
bqtest(query, new int[] { 0,1,2,3 });
bqtest(new BoostQuery(query, 0), new int[] { 0,1,2,3 });
}
public void testST3() throws Exception {
SpanQuery q = st("w1");
q.setBoost(0);
bqtest(q, new int[] {0,1,2,3});
bqtest(new SpanBoostQuery(q, 0), new int[] {0,1,2,3});
}
public void testST6() throws Exception {
SpanQuery q = st("xx");
q.setBoost(0);
qtest(q, new int[] {2,3});
qtest(new SpanBoostQuery(q, 0), new int[] {2,3});
}
public void testSF3() throws Exception {
SpanQuery q = sf(("w1"),1);
q.setBoost(0);
bqtest(q, new int[] {0,1,2,3});
bqtest(new SpanBoostQuery(q, 0), new int[] {0,1,2,3});
}
public void testSF7() throws Exception {
SpanQuery q = sf(("xx"),3);
q.setBoost(0);
bqtest(q, new int[] {2,3});
bqtest(new SpanBoostQuery(q, 0), new int[] {2,3});
}
public void testSNot3() throws Exception {
SpanQuery q = snot(sf("w1",10),st("QQ"));
q.setBoost(0);
bqtest(q, new int[] {0,1,2,3});
bqtest(new SpanBoostQuery(q, 0), new int[] {0,1,2,3});
}
public void testSNot6() throws Exception {
SpanQuery q = snot(sf("w1",10),st("xx"));
q.setBoost(0);
bqtest(q, new int[] {0,1,2,3});
bqtest(new SpanBoostQuery(q, 0), new int[] {0,1,2,3});
}
public void testSNot8() throws Exception {
// NOTE: using qtest not bqtest
SpanQuery f = snear("w1","w3",10,true);
f.setBoost(0);
f = new SpanBoostQuery(f, 0);
SpanQuery q = snot(f, st("xx"));
qtest(q, new int[] {0,1,3});
}
public void testSNot9() throws Exception {
// NOTE: using qtest not bqtest
SpanQuery t = st("xx");
t.setBoost(0);
t = new SpanBoostQuery(t, 0);
SpanQuery q = snot(snear("w1","w3",10,true), t);
qtest(q, new int[] {0,1,3});
}

View File

@ -94,7 +94,7 @@ public class TestConjunctions extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost,
public SimWeight computeWeight(
CollectionStatistics collectionStats, TermStatistics... termStats) {
return new SimWeight() {
@Override

View File

@ -110,17 +110,14 @@ public class TestConstantScoreQuery extends LuceneTestCase {
}
});
final Query csq1 = new ConstantScoreQuery(new TermQuery(new Term ("field", "term")));
csq1.setBoost(2.0f);
final Query csq2 = new ConstantScoreQuery(csq1);
csq2.setBoost(5.0f);
final BoostQuery csq1 = new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term ("field", "term"))), 2f);
final BoostQuery csq2 = new BoostQuery(new ConstantScoreQuery(csq1), 5f);
final BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(csq1, BooleanClause.Occur.SHOULD);
bq.add(csq2, BooleanClause.Occur.SHOULD);
final Query csqbq = new ConstantScoreQuery(bq.build());
csqbq.setBoost(17.0f);
final BoostQuery csqbq = new BoostQuery(new ConstantScoreQuery(bq.build()), 17f);
checkHits(searcher, csq1, csq1.getBoost(), TermScorer.class);
checkHits(searcher, csq2, csq2.getBoost(), TermScorer.class);
@ -212,16 +209,19 @@ public class TestConstantScoreQuery extends LuceneTestCase {
IndexReader r = w.getReader();
w.close();
Filter filter = new QueryWrapperFilter(AssertingQuery.wrap(random(), new TermQuery(new Term("field", "a"))));
final Query wrapped = AssertingQuery.wrap(random(), new TermQuery(new Term("field", "a")));
Filter filter = new QueryWrapperFilter(wrapped);
IndexSearcher s = newSearcher(r);
assert s instanceof AssertingIndexSearcher;
// this used to fail
s.search(new ConstantScoreQuery(filter), new TotalHitCountCollector());
// check the rewrite
Query rewritten = new ConstantScoreQuery(filter).rewrite(r);
assertTrue(rewritten instanceof ConstantScoreQuery);
assertTrue(((ConstantScoreQuery) rewritten).getQuery() instanceof AssertingQuery);
Query rewritten = filter;
for (Query q = rewritten.rewrite(r); q != rewritten; q = rewritten.rewrite(r)) {
rewritten = q;
}
assertEquals(new BoostQuery(new ConstantScoreQuery(wrapped), 0), rewritten);
r.close();
d.close();

View File

@ -517,8 +517,7 @@ public class TestDisjunctionMaxQuery extends LuceneTestCase {
/** macro */
protected Query tq(String f, String t, float b) {
Query q = tq(f, t);
q.setBoost(b);
return q;
return new BoostQuery(q, b);
}
protected void printHits(String test, ScoreDoc[] h, IndexSearcher searcher)

View File

@ -153,8 +153,8 @@ public class TestDocValuesScoring extends LuceneTestCase {
}
@Override
public SimWeight computeWeight(float queryBoost, CollectionStatistics collectionStats, TermStatistics... termStats) {
return sim.computeWeight(queryBoost, collectionStats, termStats);
public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) {
return sim.computeWeight(collectionStats, termStats);
}
@Override

View File

@ -121,8 +121,7 @@ public class TestElevationComparator extends LuceneTestCase {
// System.out.println(" pri doc=" + vals[i+1] + " pri=" + (1+max));
}
BooleanQuery q = b.build();
q.setBoost(0);
return q;
return new BoostQuery(q, 0f);
}
private Document adoc(String[] vals) {

View File

@ -139,15 +139,12 @@ public class TestFieldValueQuery extends LuceneTestCase {
iw.close();
final float boost = random().nextFloat() * 10;
final Query ref = new ConstantScoreQuery(new TermQuery(new Term("has_value", "yes")));
ref.setBoost(boost);
final Query ref = new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("has_value", "yes"))), boost);
final Query q1 = new FieldValueQuery("dv1");
q1.setBoost(boost);
final Query q1 = new BoostQuery(new FieldValueQuery("dv1"), boost);
assertSameMatches(searcher, ref, q1, true);
final Query q2 = new FieldValueQuery("dv2");
q2.setBoost(boost);
final Query q2 = new BoostQuery(new FieldValueQuery("dv2"), boost);
assertSameMatches(searcher, ref, q2, true);
reader.close();

View File

@ -240,16 +240,14 @@ public class TestLRUQueryCache extends LuceneTestCase {
final IndexSearcher searcher = newSearcher(reader);
final Query query1 = new TermQuery(new Term("color", "blue"));
query1.setBoost(random().nextFloat());
// different instance yet equal
final Query query2 = new TermQuery(new Term("color", "blue"));
query2.setBoost(random().nextFloat());
final LRUQueryCache queryCache = new LRUQueryCache(Integer.MAX_VALUE, Long.MAX_VALUE);
searcher.setQueryCache(queryCache);
searcher.setQueryCachingPolicy(QueryCachingPolicy.ALWAYS_CACHE);
searcher.search(new ConstantScoreQuery(query1), 1);
searcher.search(new BoostQuery(new ConstantScoreQuery(query1), random().nextFloat()), 1);
assertEquals(1, queryCache.cachedQueries().size());
queryCache.clearQuery(query2);
@ -467,8 +465,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
Query[] queries = new Query[10 + random().nextInt(10)];
for (int i = 0; i < queries.length; ++i) {
queries[i] = new TermQuery(new Term("color", RandomPicks.randomFrom(random(), Arrays.asList("red", "blue", "green", "yellow"))));
queries[i].setBoost(random().nextFloat());
queries[i] = new BoostQuery(new TermQuery(new Term("color", RandomPicks.randomFrom(random(), Arrays.asList("red", "blue", "green", "yellow")))), random().nextFloat());
}
searcher.setQueryCache(queryCache);
@ -476,7 +473,11 @@ public class TestLRUQueryCache extends LuceneTestCase {
for (int i = 0; i < 20; ++i) {
final int idx = random().nextInt(queries.length);
searcher.search(new ConstantScoreQuery(queries[idx]), 1);
actualCounts.put(queries[idx], 1 + actualCounts.getOrDefault(queries[idx], 0));
Query cacheKey = queries[idx];
while (cacheKey instanceof BoostQuery) {
cacheKey = ((BoostQuery) cacheKey).getQuery();
}
actualCounts.put(cacheKey, 1 + actualCounts.getOrDefault(cacheKey, 0));
}
assertEquals(actualCounts, expectedCounts);
@ -743,9 +744,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
public void testUseRewrittenQueryAsCacheKey() throws IOException {
final Query expectedCacheKey = new TermQuery(new Term("foo", "bar"));
final BooleanQuery.Builder query = new BooleanQuery.Builder();
final Query sub = expectedCacheKey.clone();
sub.setBoost(42);
query.add(sub, Occur.MUST);
query.add(new BoostQuery(expectedCacheKey, 42f), Occur.MUST);
final LRUQueryCache queryCache = new LRUQueryCache(1000000, 10000000);
Directory dir = newDirectory();
@ -762,13 +761,13 @@ public class TestLRUQueryCache extends LuceneTestCase {
@Override
public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
assertEquals(expectedCacheKey, QueryCache.cacheKey(query));
assertEquals(expectedCacheKey, query);
return true;
}
@Override
public void onUse(Query query) {
assertEquals(expectedCacheKey, QueryCache.cacheKey(query));
assertEquals(expectedCacheKey, query);
}
};

View File

@ -90,8 +90,6 @@ public class TestMatchAllDocsQuery extends LuceneTestCase {
Query q1 = new MatchAllDocsQuery();
Query q2 = new MatchAllDocsQuery();
assertTrue(q1.equals(q2));
q1.setBoost(1.5f);
assertFalse(q1.equals(q2));
}
private void addDoc(String text, IndexWriter iw, float boost) throws IOException {

View File

@ -334,7 +334,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
boolean success = ords.add(ord);
assert success; // no dups
TermContext context = TermContext.build(reader.getContext(), term);
SimWeight w = weight.similarity.computeWeight(1f,
SimWeight w = weight.similarity.computeWeight(
searcher.collectionStatistics("field"),
searcher.termStatistics(term, context));
w.getValueForNormalization(); // ignored

View File

@ -225,8 +225,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
search.setSimilarity(new DefaultSimilarity());
Query q = csrq("data", "1", "6", T, T);
q.setBoost(100);
search.search(q, new SimpleCollector() {
search.search(new BoostQuery(q, 100), new SimpleCollector() {
private int base = 0;
private Scorer scorer;
@Override
@ -252,8 +251,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
// Ensure that boosting works to score one clause of a query higher
// than another.
//
Query q1 = csrq("data", "A", "A", T, T); // matches document #0
q1.setBoost(.1f);
Query q1 = new BoostQuery(csrq("data", "A", "A", T, T), .1f); // matches document #0
Query q2 = csrq("data", "Z", "Z", T, T); // matches document #1
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.setDisableCoord(true);
@ -265,8 +263,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
Assert.assertEquals(0, hits[1].doc);
assertTrue(hits[0].score > hits[1].score);
q1 = csrq("data", "A", "A", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE); // matches document #0
q1.setBoost(.1f);
q1 = new BoostQuery(csrq("data", "A", "A", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE), .1f); // matches document #0
q2 = csrq("data", "Z", "Z", T, T, MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE); // matches document #1
bq = new BooleanQuery.Builder();
bq.setDisableCoord(true);
@ -278,8 +275,7 @@ public class TestMultiTermConstantScore extends BaseTestRangeFilter {
Assert.assertEquals(0, hits[1].doc);
assertTrue(hits[0].score > hits[1].score);
q1 = csrq("data", "A", "A", T, T); // matches document #0
q1.setBoost(10f);
q1 = new BoostQuery(csrq("data", "A", "A", T, T), 10f); // matches document #0
q2 = csrq("data", "Z", "Z", T, T); // matches document #1
bq = new BooleanQuery.Builder();
bq.add(q1, BooleanClause.Occur.SHOULD);

View File

@ -143,9 +143,10 @@ public class TestMultiTermQueryRewrites extends LuceneTestCase {
private void checkBooleanQueryBoosts(BooleanQuery bq) {
for (BooleanClause clause : bq.clauses()) {
final TermQuery mtq = (TermQuery) clause.getQuery();
final BoostQuery boostQ = (BoostQuery) clause.getQuery();
final TermQuery mtq = (TermQuery) boostQ.getQuery();
assertEquals("Parallel sorting of boosts in rewrite mode broken",
Float.parseFloat(mtq.getTerm().text()), mtq.getBoost(), 0);
Float.parseFloat(mtq.getTerm().text()), boostQ.getBoost(), 0);
}
}

View File

@ -76,14 +76,6 @@ public class TestNGramPhraseQuery extends LuceneTestCase {
PhraseQuery rewritten3 = (PhraseQuery) q;
assertArrayEquals(new Term[]{new Term("f", "ABC"), new Term("f", "DEF"), new Term("f", "FGH")}, rewritten3.getTerms());
assertArrayEquals(new int[]{0, 3, 5}, rewritten3.getPositions());
// LUCENE-4970: boosting test
NGramPhraseQuery pq4 = new NGramPhraseQuery(2, new PhraseQuery("f", "AB", "BC", "CD"));
pq4.setBoost(100.0F);
q = pq4.rewrite(reader);
assertNotSame(pq4, q);
assertEquals(pq4.getBoost(), q.getBoost(), 0.1f);
}
}

View File

@ -28,7 +28,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
@ -143,7 +142,7 @@ public class TestNeedsScores extends LuceneTestCase {
public Query rewrite(IndexReader reader) throws IOException {
Query in2 = in.rewrite(reader);
if (in2 == in) {
return this;
return super.rewrite(reader);
} else {
return new AssertNeedsScores(in2, value);
}
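
The change above follows the convention that a custom query's rewrite should delegate to super.rewrite(reader) rather than return this when the wrapped query is unchanged. A hedged sketch of that pattern for a hypothetical wrapper query (the class name and the in field are illustrative):

    @Override
    public Query rewrite(IndexReader reader) throws IOException {
      Query rewritten = in.rewrite(reader); // 'in' is the wrapped query
      if (rewritten == in) {
        return super.rewrite(reader); // nothing changed: fall back to the default rewrite
      }
      return new MyWrapperQuery(rewritten); // otherwise re-wrap the rewritten query
    }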

View File

@ -372,9 +372,6 @@ public class TestPhraseQuery extends LuceneTestCase {
builder.setSlop(5);
q = builder.build();
assertEquals("field:\"? hi|hello ? ? ? test\"~5", q.toString());
q.setBoost(2);
assertEquals("field:\"? hi|hello ? ? ? test\"~5^2.0", q.toString());
}
public void testWrappedPhrase() throws IOException {

View File

@ -35,7 +35,6 @@ import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;

View File

@ -36,8 +36,7 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
}
public void testT2() throws Exception {
TermQuery termQuery = new TermQuery(new Term(FIELD, "w1"));
termQuery.setBoost(100);
qtest(termQuery, new int[] { 0,1,2,3 });
qtest(new BoostQuery(termQuery, 100), new int[] { 0,1,2,3 });
}
/* MatchAllDocs */
@ -47,8 +46,7 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
}
public void testMA2() throws Exception {
Query q=new MatchAllDocsQuery();
q.setBoost(1000);
qtest(q, new int[] { 0,1,2,3 });
qtest(new BoostQuery(q, 1000), new int[] { 0,1,2,3 });
}
/* some simple phrase tests */
@ -94,8 +92,7 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
}
public void testCSQ3() throws Exception {
Query q = new ConstantScoreQuery(matchTheseItems(new int[] {0,2}));
q.setBoost(1000);
qtest(q, new int[] {0,2});
qtest(new BoostQuery(q, 1000), new int[] {0,2});
}
/* DisjunctionMaxQuery */
@ -164,13 +161,11 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
booleanQuery.add(new TermQuery(new Term(FIELD, "yy")), BooleanClause.Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
boostedQuery.setBoost(100);
booleanQuery.add(boostedQuery, BooleanClause.Occur.SHOULD);
booleanQuery.add(new BoostQuery(boostedQuery, 100), BooleanClause.Occur.SHOULD);
q.add(booleanQuery.build());
TermQuery xxBoostedQuery = new TermQuery(new Term(FIELD, "xx"));
xxBoostedQuery.setBoost(100000);
q.add(xxBoostedQuery);
q.add(new BoostQuery(xxBoostedQuery, 100000));
qtest(q, new int[] { 0,2,3 });
}
@ -181,13 +176,11 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
booleanQuery.add(new TermQuery(new Term(FIELD, "yy")), BooleanClause.Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w5"));
boostedQuery.setBoost(100);
booleanQuery.add(boostedQuery, BooleanClause.Occur.SHOULD);
booleanQuery.add(new BoostQuery(boostedQuery, 100), BooleanClause.Occur.SHOULD);
q.add(booleanQuery.build());
TermQuery xxBoostedQuery = new TermQuery(new Term(FIELD, "xx"));
xxBoostedQuery.setBoost(0);
q.add(xxBoostedQuery);
q.add(new BoostQuery(xxBoostedQuery, 0));
qtest(q, new int[] { 0,2,3 });
}
@ -374,8 +367,7 @@ public class TestSimpleExplanations extends BaseExplanationTestCase {
BooleanQuery.Builder query = new BooleanQuery.Builder();
query.add(new TermQuery(new Term(FIELD, "w1")), BooleanClause.Occur.SHOULD);
TermQuery boostedQuery = new TermQuery(new Term(FIELD, "w1"));
boostedQuery.setBoost(1000);
query.add(boostedQuery, BooleanClause.Occur.SHOULD);
query.add(new BoostQuery(boostedQuery, 1000), BooleanClause.Occur.SHOULD);
qtest(query.build(), new int[] { 0,1,2,3 });
}
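
As the hunks above show, a per-clause boost is now expressed by wrapping the clause query before adding it to the builder. A minimal sketch of the pattern (FIELD and the term values follow the test's own constants and are illustrative):

    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term(FIELD, "w1")), BooleanClause.Occur.SHOULD);
    builder.add(new BoostQuery(new TermQuery(new Term(FIELD, "w5")), 100f), BooleanClause.Occur.SHOULD);
    Query query = builder.build();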

View File

@ -202,4 +202,16 @@ public class TestSimpleSearchEquivalence extends SearchEquivalenceTestBase {
PhraseQuery q2 = builder.build();
assertSameScores(q1, q2);
}
public void testBoostQuerySimplification() throws Exception {
float b1 = random().nextFloat() * 10;
float b2 = random().nextFloat() * 10;
Term term = randomTerm();
Query q1 = new BoostQuery(new BoostQuery(new TermQuery(term), b2), b1);
// Use AssertingQuery to prevent BoostQuery from merging inner and outer boosts
Query q2 = new BoostQuery(new AssertingQuery(random(), new BoostQuery(new TermQuery(term), b2)), b1);
assertSameScores(q1, q2);
}
}
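
The new test compares nested BoostQuery instances against a form whose inner boost cannot be merged away; as the comment notes, BoostQuery otherwise folds nested boosts together on rewrite. In other words, the two queries below are expected to score identically (term, b1 and b2 are as in the test above):

    Query nested = new BoostQuery(new BoostQuery(new TermQuery(term), b2), b1);
    // after rewrite, the nested form behaves like a single boost of b1 * b2
    Query flat = new BoostQuery(new TermQuery(term), b1 * b2);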

View File

@ -159,17 +159,12 @@ public class TestTermRangeQuery extends LuceneTestCase {
public void testEqualsHashcode() {
Query query = TermRangeQuery.newStringRange("content", "A", "C", true, true);
query.setBoost(1.0f);
Query other = TermRangeQuery.newStringRange("content", "A", "C", true, true);
other.setBoost(1.0f);
assertEquals("query equals itself is true", query, query);
assertEquals("equivalent queries are equal", query, other);
assertEquals("hashcode must return same value when equals is true", query.hashCode(), other.hashCode());
other.setBoost(2.0f);
assertFalse("Different boost queries are not equal", query.equals(other));
other = TermRangeQuery.newStringRange("notcontent", "A", "C", true, true);
assertFalse("Different fields are not equal", query.equals(other));

View File

@ -190,11 +190,11 @@ public class TestTopFieldCollector extends LuceneTestCase {
text.setStringValue("baz");
w.addDocument(doc);
IndexReader reader = w.getReader();
TermQuery foo = new TermQuery(new Term("text", "foo"));
TermQuery bar = new TermQuery(new Term("text", "bar"));
bar.setBoost(2);
TermQuery baz = new TermQuery(new Term("text", "baz"));
baz.setBoost(3);
Query foo = new TermQuery(new Term("text", "foo"));
Query bar = new TermQuery(new Term("text", "bar"));
bar = new BoostQuery(bar, 2);
Query baz = new TermQuery(new Term("text", "baz"));
baz = new BoostQuery(baz, 3);
Query query = new BooleanQuery.Builder()
.add(foo, Occur.SHOULD)
.add(bar, Occur.SHOULD)

View File

@ -30,19 +30,6 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
assertFalse(UsageTrackingQueryCachingPolicy.isCostly(new TermQuery(new Term("field", "value"))));
}
public void testBoostIgnored() {
Query q1 = new TermQuery(new Term("foo", "bar"));
q1.setBoost(2);
Query q2 = q1.clone();
q2.setBoost(3);
Query q3 = q1.clone();
q3.setBoost(4);
UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
policy.onUse(q1);
policy.onUse(q2);
assertEquals(2, policy.frequency(q3));
}
public void testNeverCacheMatchAll() throws Exception {
Query q = new MatchAllDocsQuery();
UsageTrackingQueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();

View File

@ -70,22 +70,16 @@ public class TestWildcard extends LuceneTestCase {
assertMatches(searcher, wq, 1);
wq.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
wq.setBoost(0.1F);
Query q = searcher.rewrite(wq);
assertTrue(q instanceof TermQuery);
assertEquals(q.getBoost(), wq.getBoost(), 0);
wq.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_REWRITE);
wq.setBoost(0.2F);
q = searcher.rewrite(wq);
assertTrue(q instanceof MultiTermQueryConstantScoreWrapper);
assertEquals(q.getBoost(), wq.getBoost(), 0.1);
wq.setRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_REWRITE);
wq.setBoost(0.4F);
q = searcher.rewrite(wq);
assertTrue(q instanceof ConstantScoreQuery);
assertEquals(q.getBoost(), wq.getBoost(), 0.1);
reader.close();
indexStore.close();
}

View File

@ -20,6 +20,7 @@ package org.apache.lucene.search.payloads;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BaseExplanationTestCase;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.spans.SpanBoostQuery;
import org.apache.lucene.search.spans.SpanNearQuery;
import org.apache.lucene.search.spans.SpanOrQuery;
import org.apache.lucene.search.spans.SpanQuery;
@ -64,8 +65,7 @@ public class TestPayloadExplanations extends BaseExplanationTestCase {
public void testPT2() throws Exception {
for (PayloadFunction fn : functions) {
SpanQuery q = pt("w1", fn);
q.setBoost(1000);
qtest(q, new int[] {0,1,2,3});
qtest(new SpanBoostQuery(q, 1000), new int[] {0,1,2,3});
}
}
@ -78,8 +78,7 @@ public class TestPayloadExplanations extends BaseExplanationTestCase {
public void testPT5() throws Exception {
for (PayloadFunction fn : functions) {
SpanQuery q = pt("xx", fn);
q.setBoost(1000);
qtest(q, new int[] {2,3});
qtest(new SpanBoostQuery(q, 1000), new int[] {2,3});
}
}
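
Span queries use the analogous wrapper: because the result must remain a SpanQuery, the tests wrap the payload span query in SpanBoostQuery rather than BoostQuery. A minimal sketch (pt and fn are the helpers used above):

    SpanQuery q = pt("w1", fn);
    SpanQuery boosted = new SpanBoostQuery(q, 1000f); // preserves the SpanQuery type, unlike BoostQuery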

View File

@ -165,7 +165,7 @@ public class TestSimilarityBase extends LuceneTestCase {
/** Creates the default statistics object that the specific tests modify. */
private BasicStats createStats() {
BasicStats stats = new BasicStats("spoof", 1);
BasicStats stats = new BasicStats("spoof");
stats.setNumberOfDocuments(NUMBER_OF_DOCUMENTS);
stats.setNumberOfFieldTokens(NUMBER_OF_FIELD_TOKENS);
stats.setAvgFieldLength(AVG_FIELD_LENGTH);
@ -189,9 +189,10 @@ public class TestSimilarityBase extends LuceneTestCase {
*/
private void unitTestCore(BasicStats stats, float freq, int docLen) {
for (SimilarityBase sim : sims) {
BasicStats realStats = (BasicStats) sim.computeWeight(stats.getTotalBoost(),
BasicStats realStats = (BasicStats) sim.computeWeight(
toCollectionStats(stats),
toTermStats(stats));
realStats.normalize(1f, stats.getBoost());
float score = sim.score(realStats, freq, docLen);
float explScore = sim.explain(
realStats, 1, Explanation.match(freq, "freq"), docLen).getValue();
@ -521,9 +522,10 @@ public class TestSimilarityBase extends LuceneTestCase {
*/
private void correctnessTestCore(SimilarityBase sim, float gold) {
BasicStats stats = createStats();
BasicStats realStats = (BasicStats) sim.computeWeight(stats.getTotalBoost(),
BasicStats realStats = (BasicStats) sim.computeWeight(
toCollectionStats(stats),
toTermStats(stats));
realStats.normalize(1f, stats.getBoost());
float score = sim.score(realStats, FREQ, DOC_LEN);
assertEquals(
sim.toString() + " score not correct.", gold, score, FLOAT_EPSILON);
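
Both hunks in this file track the related Similarity change: computeWeight no longer takes a query boost, and the boost is applied when the returned weight is normalized instead. A hedged sketch of the new call sequence, where similarity, collectionStats, termStats and boost are placeholders for values obtained from the surrounding test:

    SimWeight weight = similarity.computeWeight(collectionStats, termStats);
    weight.normalize(1f, boost); // the boost is now supplied at normalization time, not to computeWeight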

View File

@ -136,7 +136,6 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
public void testRewrite0() throws Exception {
SpanQuery q = new FieldMaskingSpanQuery
(new SpanTermQuery(new Term("last", "sally")) , "first");
q.setBoost(8.7654321f);
SpanQuery qr = (SpanQuery) searcher.rewrite(q);
QueryUtils.checkEqual(q, qr);
@ -195,16 +194,6 @@ public class TestFieldMaskingSpanQuery extends LuceneTestCase {
QueryUtils.checkUnequal(q1, q3);
QueryUtils.checkUnequal(q1, q4);
QueryUtils.checkUnequal(q1, q5);
SpanQuery qA = new FieldMaskingSpanQuery
(new SpanTermQuery(new Term("last", "sally")) , "first");
qA.setBoost(9f);
SpanQuery qB = new FieldMaskingSpanQuery
(new SpanTermQuery(new Term("last", "sally")) , "first");
QueryUtils.checkUnequal(qA, qB);
qB.setBoost(9f);
QueryUtils.checkEqual(qA, qB);
}
public void testNoop0() throws Exception {

Some files were not shown because too many files have changed in this diff.