Removed Scorer#getWeight (#13440)

If a caller requires the Weight, it has to keep track of the Weight with which the Scorer was created in the first place, instead of relying on the Scorer.

Closes #13410
This commit is contained in:
Sanjay Dutt 2024-06-06 19:33:19 +05:30 committed by GitHub
parent d5aa88bd7e
commit d0d2aa274f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
124 changed files with 231 additions and 843 deletions

View File

@ -110,6 +110,8 @@ API Changes
I/O for top-level disjunctions. Weight#bulkScorer() still exists for compatibility, but delegates
to ScorerSupplier#bulkScorer(). (Adrien Grand)
* GITHUB#13410: Removed Scorer#getWeight (Sanjay Dutt, Adrien Grand)
New Features
---------------------

View File

@ -779,3 +779,17 @@ to manage the indexed data on their own and create new `Facet` implementations t
The `Weight#scorerSupplier` method is now declared abstract, compelling child classes to implement the ScorerSupplier
interface. Additionally, `Weight#scorer` is now declared final, with its implementation being delegated to
`Weight#scorerSupplier` for the scorer.
### Reference to `weight` is removed from Scorer (GITHUB#13410)
The `weight` field has been removed from the Scorer class. Consequently, the constructor `Scorer(Weight)` and the getter
`Scorer#getWeight` have also been eliminated. References to weight have also been removed from nearly all the subclasses
of Scorer, including ConstantScoreScorer, TermScorer, and others.
Additionally, several APIs have been modified to remove the weight reference, as it is no longer necessary.
Specifically, the method `FunctionValues#getScorer(Weight weight, LeafReaderContext readerContext)` has been updated to
`FunctionValues#getScorer(LeafReaderContext readerContext)`.
Callers must now keep track of the Weight instance that created the Scorer if they need it, instead of relying on
Scorer.

View File

@ -25,7 +25,6 @@ import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
@ -59,7 +58,6 @@ abstract class BaseShapeDocValuesQuery extends SpatialQuery {
LeafReader reader,
SpatialVisitor spatialVisitor,
ScoreMode scoreMode,
ConstantScoreWeight weight,
float boost,
float score)
throws IOException {
@ -89,7 +87,7 @@ abstract class BaseShapeDocValuesQuery extends SpatialQuery {
@Override
public Scorer get(long leadCost) {
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
}
@Override

View File

@ -135,7 +135,7 @@ abstract class BinaryRangeFieldRangeQuery extends Query {
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, iterator);
final var scorer = new ConstantScoreScorer(score(), scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -153,7 +153,7 @@ final class LatLonDocValuesBoxQuery extends Query {
return 5; // 5 comparisons
}
};
final var scorer = new ConstantScoreScorer(this, boost, scoreMode, iterator);
final var scorer = new ConstantScoreScorer(boost, scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -158,7 +158,7 @@ class LatLonDocValuesQuery extends Query {
throw new IllegalArgumentException(
"Invalid query relationship:[" + queryRelation + "]");
}
final var scorer = new ConstantScoreScorer(this, boost, scoreMode, iterator);
final var scorer = new ConstantScoreScorer(boost, scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -215,14 +215,12 @@ final class LatLonPointDistanceFeatureQuery extends Query {
final SortedNumericDocValues multiDocValues =
DocValues.getSortedNumeric(context.reader(), field);
final NumericDocValues docValues = selectValues(multiDocValues);
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
return new DistanceScorer(
weight, context.reader().maxDoc(), leadCost, boost, pointValues, docValues);
context.reader().maxDoc(), leadCost, boost, pointValues, docValues);
}
@Override
@ -258,13 +256,11 @@ final class LatLonPointDistanceFeatureQuery extends Query {
private double maxDistance = GeoUtils.EARTH_MEAN_RADIUS_METERS * Math.PI;
protected DistanceScorer(
Weight weight,
int maxDoc,
long leadCost,
float boost,
PointValues pointValues,
NumericDocValues docValues) {
super(weight);
this.maxDoc = maxDoc;
this.leadCost = leadCost;
this.boost = boost;

View File

@ -137,7 +137,6 @@ final class LatLonPointDistanceQuery extends Query {
DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
final IntersectVisitor visitor = getIntersectVisitor(result);
final Weight weight = this;
return new ScorerSupplier() {
long cost = -1;
@ -155,10 +154,10 @@ final class LatLonPointDistanceQuery extends Query {
long[] cost = new long[] {reader.maxDoc()};
values.intersect(getInverseIntersectVisitor(result, cost));
final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
values.intersect(visitor);
return new ConstantScoreScorer(weight, score(), scoreMode, result.build().iterator());
return new ConstantScoreScorer(score(), scoreMode, result.build().iterator());
}
@Override

View File

@ -205,14 +205,12 @@ final class LongDistanceFeatureQuery extends Query {
final SortedNumericDocValues multiDocValues =
DocValues.getSortedNumeric(context.reader(), field);
final NumericDocValues docValues = selectValues(multiDocValues);
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
return new DistanceScorer(
weight, context.reader().maxDoc(), leadCost, boost, pointValues, docValues);
context.reader().maxDoc(), leadCost, boost, pointValues, docValues);
}
@Override
@ -236,13 +234,11 @@ final class LongDistanceFeatureQuery extends Query {
private long maxDistance = Long.MAX_VALUE;
protected DistanceScorer(
Weight weight,
int maxDoc,
long leadCost,
float boost,
PointValues pointValues,
NumericDocValues docValues) {
super(weight);
this.maxDoc = maxDoc;
this.leadCost = leadCost;
this.boost = boost;

View File

@ -459,13 +459,12 @@ public abstract class RangeFieldQuery extends Query {
allDocsMatch = true;
}
final Weight weight = this;
if (allDocsMatch) {
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) {
return new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.all(reader.maxDoc()));
score(), scoreMode, DocIdSetIterator.all(reader.maxDoc()));
}
@Override
@ -484,7 +483,7 @@ public abstract class RangeFieldQuery extends Query {
public Scorer get(long leadCost) throws IOException {
values.intersect(visitor);
DocIdSetIterator iterator = result.build().iterator();
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override

View File

@ -149,7 +149,7 @@ final class SortedNumericDocValuesRangeQuery extends Query {
}
};
}
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, iterator);
final var scorer = new ConstantScoreScorer(score(), scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}
};

View File

@ -150,7 +150,7 @@ final class SortedNumericDocValuesSetQuery extends Query implements Accountable
}
};
}
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, iterator);
final var scorer = new ConstantScoreScorer(score(), scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}
};

View File

@ -110,7 +110,6 @@ final class SortedSetDocValuesRangeQuery extends Query {
return new ConstantScoreWeight(this, boost) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
if (context.reader().getFieldInfos().fieldInfo(field) == null) {
return null;
}
@ -151,7 +150,7 @@ final class SortedSetDocValuesRangeQuery extends Query {
// no terms matched in this segment
if (minOrd > maxOrd) {
return new ConstantScoreScorer(weight, score(), scoreMode, DocIdSetIterator.empty());
return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.empty());
}
final SortedDocValues singleton = DocValues.unwrapSingleton(values);
@ -193,7 +192,7 @@ final class SortedSetDocValuesRangeQuery extends Query {
}
};
}
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override

View File

@ -150,7 +150,6 @@ abstract class SpatialQuery extends Query {
LeafReader reader,
SpatialVisitor spatialVisitor,
ScoreMode scoreMode,
ConstantScoreWeight weight,
float boost,
float score)
throws IOException {
@ -178,8 +177,7 @@ abstract class SpatialQuery extends Query {
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) {
return new ConstantScoreScorer(
weight, score, scoreMode, DocIdSetIterator.all(reader.maxDoc()));
return new ConstantScoreScorer(score, scoreMode, DocIdSetIterator.all(reader.maxDoc()));
}
@Override
@ -200,7 +198,7 @@ abstract class SpatialQuery extends Query {
return new RelationScorerSupplier(values, spatialVisitor, queryRelation, field) {
@Override
public Scorer get(long leadCost) throws IOException {
return getScorer(reader, weight, score, scoreMode);
return getScorer(reader, score, scoreMode);
}
};
}
@ -215,7 +213,7 @@ abstract class SpatialQuery extends Query {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final LeafReader reader = context.reader();
return getScorerSupplier(reader, spatialVisitor, scoreMode, this, boost, score());
return getScorerSupplier(reader, spatialVisitor, scoreMode, boost, score());
}
@Override
@ -291,18 +289,17 @@ abstract class SpatialQuery extends Query {
}
protected Scorer getScorer(
final LeafReader reader, final Weight weight, final float boost, final ScoreMode scoreMode)
throws IOException {
final LeafReader reader, final float boost, final ScoreMode scoreMode) throws IOException {
switch (queryRelation) {
case INTERSECTS:
return getSparseScorer(reader, weight, boost, scoreMode);
return getSparseScorer(reader, boost, scoreMode);
case CONTAINS:
return getContainsDenseScorer(reader, weight, boost, scoreMode);
return getContainsDenseScorer(reader, boost, scoreMode);
case WITHIN:
case DISJOINT:
return values.getDocCount() == values.size()
? getSparseScorer(reader, weight, boost, scoreMode)
: getDenseScorer(reader, weight, boost, scoreMode);
? getSparseScorer(reader, boost, scoreMode)
: getDenseScorer(reader, boost, scoreMode);
default:
throw new IllegalArgumentException("Unsupported query type :[" + queryRelation + "]");
}
@ -310,8 +307,7 @@ abstract class SpatialQuery extends Query {
/** Scorer used for INTERSECTS and single value points */
private Scorer getSparseScorer(
final LeafReader reader, final Weight weight, final float boost, final ScoreMode scoreMode)
throws IOException {
final LeafReader reader, final float boost, final ScoreMode scoreMode) throws IOException {
if (queryRelation == QueryRelation.DISJOINT
&& values.getDocCount() == reader.maxDoc()
&& values.getDocCount() == values.size()
@ -324,7 +320,7 @@ abstract class SpatialQuery extends Query {
final long[] cost = new long[] {reader.maxDoc()};
values.intersect(getInverseDenseVisitor(spatialVisitor, queryRelation, result, cost));
final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
} else if (values.getDocCount() < (values.size() >>> 2)) {
// we use a dense structure so we can skip already visited documents
final FixedBitSet result = new FixedBitSet(reader.maxDoc());
@ -333,18 +329,17 @@ abstract class SpatialQuery extends Query {
assert cost[0] > 0 || result.cardinality() == 0;
final DocIdSetIterator iterator =
cost[0] == 0 ? DocIdSetIterator.empty() : new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
} else {
final DocIdSetBuilder docIdSetBuilder = new DocIdSetBuilder(reader.maxDoc(), values, field);
values.intersect(getSparseVisitor(spatialVisitor, queryRelation, docIdSetBuilder));
final DocIdSetIterator iterator = docIdSetBuilder.build().iterator();
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
}
}
/** Scorer used for WITHIN and DISJOINT */
private Scorer getDenseScorer(
LeafReader reader, Weight weight, final float boost, ScoreMode scoreMode)
private Scorer getDenseScorer(LeafReader reader, final float boost, ScoreMode scoreMode)
throws IOException {
final FixedBitSet result = new FixedBitSet(reader.maxDoc());
final long[] cost;
@ -369,11 +364,10 @@ abstract class SpatialQuery extends Query {
assert cost[0] > 0 || result.cardinality() == 0;
final DocIdSetIterator iterator =
cost[0] == 0 ? DocIdSetIterator.empty() : new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
}
private Scorer getContainsDenseScorer(
LeafReader reader, Weight weight, final float boost, ScoreMode scoreMode)
private Scorer getContainsDenseScorer(LeafReader reader, final float boost, ScoreMode scoreMode)
throws IOException {
final FixedBitSet result = new FixedBitSet(reader.maxDoc());
final long[] cost = new long[] {0};
@ -385,7 +379,7 @@ abstract class SpatialQuery extends Query {
assert cost[0] > 0 || result.cardinality() == 0;
final DocIdSetIterator iterator =
cost[0] == 0 ? DocIdSetIterator.empty() : new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, boost, scoreMode, iterator);
return new ConstantScoreScorer(boost, scoreMode, iterator);
}
@Override

View File

@ -130,7 +130,7 @@ public class XYDocValuesPointInGeometryQuery extends Query {
return 1000f; // TODO: what should it be?
}
};
final var scorer = new ConstantScoreScorer(this, boost, scoreMode, iterator);
final var scorer = new ConstantScoreScorer(boost, scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -141,7 +141,6 @@ final class XYPointInGeometryQuery extends Query {
return null;
}
XYPointField.checkCompatible(fieldInfo);
final Weight weight = this;
return new ScorerSupplier() {
@ -152,7 +151,7 @@ final class XYPointInGeometryQuery extends Query {
@Override
public Scorer get(long leadCost) throws IOException {
values.intersect(visitor);
return new ConstantScoreScorer(weight, score(), scoreMode, result.build().iterator());
return new ConstantScoreScorer(score(), scoreMode, result.build().iterator());
}
@Override

View File

@ -388,7 +388,7 @@ abstract class AbstractKnnVectorQuery extends Query {
return null;
}
final var scorer =
new Scorer(this) {
new Scorer() {
final int lower = segmentStarts[context.ord];
final int upper = segmentStarts[context.ord + 1];
int upTo = -1;

View File

@ -208,7 +208,7 @@ abstract class AbstractMultiTermQueryConstantScoreWrapper<Q extends MultiTermQue
if (iterator == null) {
return null;
}
return new ConstantScoreScorer(this, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override
@ -232,8 +232,6 @@ abstract class AbstractMultiTermQueryConstantScoreWrapper<Q extends MultiTermQue
}
final long cost = estimateCost(terms, q.getTermsCount());
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
@ -252,8 +250,7 @@ abstract class AbstractMultiTermQueryConstantScoreWrapper<Q extends MultiTermQue
// find that there are actually no hits, we need to return an empty Scorer as opposed
// to null:
return Objects.requireNonNullElseGet(
scorer,
() -> new ConstantScoreScorer(weight, score(), scoreMode, DocIdSetIterator.empty()));
scorer, () -> new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.empty()));
}
@Override
@ -267,7 +264,7 @@ abstract class AbstractMultiTermQueryConstantScoreWrapper<Q extends MultiTermQue
} else {
bulkScorer =
new DefaultBulkScorer(
new ConstantScoreScorer(weight, score(), scoreMode, weightOrIterator.iterator));
new ConstantScoreScorer(score(), scoreMode, weightOrIterator.iterator));
}
// It's against the API contract to return a null scorer from a non-null ScoreSupplier.
@ -278,8 +275,7 @@ abstract class AbstractMultiTermQueryConstantScoreWrapper<Q extends MultiTermQue
bulkScorer,
() ->
new DefaultBulkScorer(
new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.empty())));
new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.empty())));
}
@Override

View File

@ -201,8 +201,7 @@ abstract class AbstractVectorSimilarityQuery extends Query {
final DocIdSetIterator iterator;
final float[] cachedScore;
VectorSimilarityScorer(Weight weight, DocIdSetIterator iterator, float[] cachedScore) {
super(weight);
VectorSimilarityScorer(DocIdSetIterator iterator, float[] cachedScore) {
this.iterator = iterator;
this.cachedScore = cachedScore;
}
@ -253,7 +252,7 @@ abstract class AbstractVectorSimilarityQuery extends Query {
}
};
return new VectorSimilarityScorer(weight, iterator, cachedScore);
return new VectorSimilarityScorer(iterator, cachedScore);
}
static VectorSimilarityScorer fromAcceptDocs(
@ -282,7 +281,7 @@ abstract class AbstractVectorSimilarityQuery extends Query {
}
};
return new VectorSimilarityScorer(weight, iterator, cachedScore);
return new VectorSimilarityScorer(iterator, cachedScore);
}
@Override

View File

@ -34,8 +34,7 @@ final class BlockMaxConjunctionScorer extends Scorer {
float minScore;
/** Create a new {@link BlockMaxConjunctionScorer} from scoring clauses. */
BlockMaxConjunctionScorer(Weight weight, Collection<Scorer> scorersList) throws IOException {
super(weight);
BlockMaxConjunctionScorer(Collection<Scorer> scorersList) throws IOException {
this.scorers = scorersList.toArray(new Scorer[scorersList.size()]);
// Sort scorer by cost
Arrays.sort(this.scorers, Comparator.comparingLong(s -> s.iterator().cost()));

View File

@ -31,8 +31,6 @@ import org.apache.lucene.search.Weight.DefaultBulkScorer;
import org.apache.lucene.util.Bits;
final class BooleanScorerSupplier extends ScorerSupplier {
private final Weight weight;
private final Map<BooleanClause.Occur, Collection<ScorerSupplier>> subs;
private final ScoreMode scoreMode;
private final int minShouldMatch;
@ -65,7 +63,6 @@ final class BooleanScorerSupplier extends ScorerSupplier {
== 0) {
throw new IllegalArgumentException("There should be at least one positive clause");
}
this.weight = weight;
this.subs = subs;
this.scoreMode = scoreMode;
this.minShouldMatch = minShouldMatch;
@ -121,8 +118,8 @@ final class BooleanScorerSupplier extends ScorerSupplier {
// no scoring clauses but scores are needed so we wrap the scorer in
// a constant score in order to allow early termination
return scorer.twoPhaseIterator() != null
? new ConstantScoreScorer(weight, 0f, scoreMode, scorer.twoPhaseIterator())
: new ConstantScoreScorer(weight, 0f, scoreMode, scorer.iterator());
? new ConstantScoreScorer(0f, scoreMode, scorer.twoPhaseIterator())
: new ConstantScoreScorer(0f, scoreMode, scorer.iterator());
}
return scorer;
}
@ -159,7 +156,7 @@ final class BooleanScorerSupplier extends ScorerSupplier {
subs.get(Occur.MUST_NOT),
leadCost);
Scorer opt = opt(subs.get(Occur.SHOULD), minShouldMatch, scoreMode, leadCost, false);
return new ConjunctionScorer(weight, Arrays.asList(req, opt), Arrays.asList(req, opt));
return new ConjunctionScorer(Arrays.asList(req, opt), Arrays.asList(req, opt));
} else {
assert scoreMode.needsScores();
return new ReqOptSumScorer(
@ -237,7 +234,7 @@ final class BooleanScorerSupplier extends ScorerSupplier {
Scorer prohibitedScorer =
prohibited.size() == 1
? prohibited.get(0)
: new DisjunctionSumScorer(weight, prohibited, ScoreMode.COMPLETE_NO_SCORES);
: new DisjunctionSumScorer(prohibited, ScoreMode.COMPLETE_NO_SCORES);
return new ReqExclBulkScorer(positiveScorer, prohibitedScorer);
}
}
@ -349,8 +346,7 @@ final class BooleanScorerSupplier extends ScorerSupplier {
return new ConjunctionBulkScorer(requiredScoring, requiredNoScoring);
}
if (scoreMode == ScoreMode.TOP_SCORES && requiredScoring.size() > 1) {
requiredScoring =
Collections.singletonList(new BlockMaxConjunctionScorer(weight, requiredScoring));
requiredScoring = Collections.singletonList(new BlockMaxConjunctionScorer(requiredScoring));
}
Scorer conjunctionScorer;
if (requiredNoScoring.size() + requiredScoring.size() == 1) {
@ -378,7 +374,7 @@ final class BooleanScorerSupplier extends ScorerSupplier {
List<Scorer> required = new ArrayList<>();
required.addAll(requiredScoring);
required.addAll(requiredNoScoring);
conjunctionScorer = new ConjunctionScorer(weight, required, requiredScoring);
conjunctionScorer = new ConjunctionScorer(required, requiredScoring);
}
return new DefaultBulkScorer(conjunctionScorer);
}
@ -433,14 +429,14 @@ final class BooleanScorerSupplier extends ScorerSupplier {
scoringScorers.add(scorer);
}
if (scoreMode == ScoreMode.TOP_SCORES && scoringScorers.size() > 1 && topLevelScoringClause) {
Scorer blockMaxScorer = new BlockMaxConjunctionScorer(weight, scoringScorers);
Scorer blockMaxScorer = new BlockMaxConjunctionScorer(scoringScorers);
if (requiredScorers.isEmpty()) {
return blockMaxScorer;
}
scoringScorers = Collections.singletonList(blockMaxScorer);
}
requiredScorers.addAll(scoringScorers);
return new ConjunctionScorer(weight, requiredScorers, scoringScorers);
return new ConjunctionScorer(requiredScorers, scoringScorers);
}
}
@ -477,9 +473,9 @@ final class BooleanScorerSupplier extends ScorerSupplier {
// However, as WANDScorer uses more complex algorithm and data structure, we would like to
// still use DisjunctionSumScorer to handle exhaustive pure disjunctions, which may be faster
if ((scoreMode == ScoreMode.TOP_SCORES && topLevelScoringClause) || minShouldMatch > 1) {
return new WANDScorer(weight, optionalScorers, minShouldMatch, scoreMode);
return new WANDScorer(optionalScorers, minShouldMatch, scoreMode);
} else {
return new DisjunctionSumScorer(weight, optionalScorers, scoreMode);
return new DisjunctionSumScorer(optionalScorers, scoreMode);
}
}
}

View File

@ -31,8 +31,7 @@ class ConjunctionScorer extends Scorer {
* Create a new {@link ConjunctionScorer}, note that {@code scorers} must be a subset of {@code
* required}.
*/
ConjunctionScorer(Weight weight, Collection<Scorer> required, Collection<Scorer> scorers) {
super(weight);
ConjunctionScorer(Collection<Scorer> required, Collection<Scorer> scorers) {
assert required.containsAll(scorers);
this.disi = ConjunctionUtils.intersectScorers(required);
this.scorers = scorers.toArray(new Scorer[scorers.size()]);

View File

@ -149,10 +149,9 @@ public final class ConstantScoreQuery extends Query {
final Scorer innerScorer = innerScorerSupplier.get(leadCost);
final TwoPhaseIterator twoPhaseIterator = innerScorer.twoPhaseIterator();
if (twoPhaseIterator == null) {
return new ConstantScoreScorer(
innerWeight, score(), scoreMode, innerScorer.iterator());
return new ConstantScoreScorer(score(), scoreMode, innerScorer.iterator());
} else {
return new ConstantScoreScorer(innerWeight, score(), scoreMode, twoPhaseIterator);
return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator);
}
}

View File

@ -64,14 +64,11 @@ public final class ConstantScoreScorer extends Scorer {
* Constructor based on a {@link DocIdSetIterator} which will be used to drive iteration. Two
* phase iteration will not be supported.
*
* @param weight the parent weight
* @param score the score to return on each document
* @param scoreMode the score mode
* @param disi the iterator that defines matching documents
*/
public ConstantScoreScorer(
Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator disi) {
super(weight);
public ConstantScoreScorer(float score, ScoreMode scoreMode, DocIdSetIterator disi) {
this.score = score;
this.scoreMode = scoreMode;
// TODO: Only wrap when it is the top-level scoring clause? See
@ -86,14 +83,11 @@ public final class ConstantScoreScorer extends Scorer {
* Constructor based on a {@link TwoPhaseIterator}. In that case the {@link Scorer} will support
* two-phase iteration.
*
* @param weight the parent weight
* @param score the score to return on each document
* @param scoreMode the score mode
* @param twoPhaseIterator the iterator that defines matching documents
*/
public ConstantScoreScorer(
Weight weight, float score, ScoreMode scoreMode, TwoPhaseIterator twoPhaseIterator) {
super(weight);
public ConstantScoreScorer(float score, ScoreMode scoreMode, TwoPhaseIterator twoPhaseIterator) {
this.score = score;
this.scoreMode = scoreMode;
if (scoreMode == ScoreMode.TOP_SCORES) {

View File

@ -142,7 +142,6 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
} else if (scorerSuppliers.size() == 1) {
return scorerSuppliers.get(0);
} else {
final Weight thisWeight = this;
return new ScorerSupplier() {
private long cost = -1;
@ -153,7 +152,7 @@ public final class DisjunctionMaxQuery extends Query implements Iterable<Query>
for (ScorerSupplier ss : scorerSuppliers) {
scorers.add(ss.get(leadCost));
}
return new DisjunctionMaxScorer(thisWeight, tieBreakerMultiplier, scorers, scoreMode);
return new DisjunctionMaxScorer(tieBreakerMultiplier, scorers, scoreMode);
}
@Override

View File

@ -36,15 +36,13 @@ final class DisjunctionMaxScorer extends DisjunctionScorer {
/**
* Creates a new instance of DisjunctionMaxScorer
*
* @param weight The Weight to be used.
* @param tieBreakerMultiplier Multiplier applied to non-maximum-scoring subqueries for a document
* as they are summed into the result.
* @param subScorers The sub scorers this Scorer should iterate on
*/
DisjunctionMaxScorer(
Weight weight, float tieBreakerMultiplier, List<Scorer> subScorers, ScoreMode scoreMode)
DisjunctionMaxScorer(float tieBreakerMultiplier, List<Scorer> subScorers, ScoreMode scoreMode)
throws IOException {
super(weight, subScorers, scoreMode);
super(subScorers, scoreMode);
this.subScorers = subScorers;
this.tieBreakerMultiplier = tieBreakerMultiplier;
if (tieBreakerMultiplier < 0 || tieBreakerMultiplier > 1) {

View File

@ -31,9 +31,7 @@ abstract class DisjunctionScorer extends Scorer {
private final DocIdSetIterator approximation;
private final TwoPhase twoPhase;
protected DisjunctionScorer(Weight weight, List<Scorer> subScorers, ScoreMode scoreMode)
throws IOException {
super(weight);
protected DisjunctionScorer(List<Scorer> subScorers, ScoreMode scoreMode) throws IOException {
if (subScorers.size() <= 1) {
throw new IllegalArgumentException("There must be at least 2 subScorers");
}

View File

@ -28,12 +28,10 @@ final class DisjunctionSumScorer extends DisjunctionScorer {
/**
* Construct a <code>DisjunctionScorer</code>.
*
* @param weight The weight to be used.
* @param subScorers Array of at least two subscorers.
*/
DisjunctionSumScorer(Weight weight, List<Scorer> subScorers, ScoreMode scoreMode)
throws IOException {
super(weight, subScorers, scoreMode);
DisjunctionSumScorer(List<Scorer> subScorers, ScoreMode scoreMode) throws IOException {
super(subScorers, scoreMode);
this.scorers = subScorers;
}

View File

@ -153,7 +153,6 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
return null; // no values/docs so nothing can match
}
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
@ -164,8 +163,7 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
if (termsEnum.next() == null) {
// no matching terms
return new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.empty());
return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.empty());
}
// Create a bit set for the "term set" ordinals (these are the terms provided by the
@ -184,8 +182,7 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
// no terms matched in this segment
if (maxOrd < 0) {
return new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.empty());
return new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.empty());
}
final SortedDocValues singleton = DocValues.unwrapSingleton(values);
@ -227,7 +224,7 @@ public final class DocValuesRewriteMethod extends MultiTermQuery.RewriteMethod {
};
}
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override

View File

@ -213,7 +213,7 @@ public class FieldExistsQuery extends Query {
if (iterator == null) {
return null;
}
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, iterator);
final var scorer = new ConstantScoreScorer(score(), scoreMode, iterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -30,23 +30,11 @@ public abstract class FilterScorer extends Scorer implements Unwrappable<Scorer>
protected final Scorer in;
/**
* Create a new FilterScorer
* Create a new FilterScorer with a specific weight
*
* @param in the {@link Scorer} to wrap
*/
public FilterScorer(Scorer in) {
super(in.weight);
this.in = in;
}
/**
* Create a new FilterScorer with a specific weight
*
* @param in the {@link Scorer} to wrap
* @param weight a {@link Weight}
*/
public FilterScorer(Scorer in, Weight weight) {
super(weight);
if (in == null) {
throw new NullPointerException("wrapped Scorer must not be null");
}

View File

@ -158,14 +158,13 @@ public class IndexSortSortedNumericDocValuesRangeQuery extends Query {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
IteratorAndCount itAndCount = getDocIdSetIteratorOrNull(context);
if (itAndCount != null) {
DocIdSetIterator disi = itAndCount.it;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
return new ConstantScoreScorer(weight, score(), scoreMode, disi);
return new ConstantScoreScorer(score(), scoreMode, disi);
}
@Override

View File

@ -25,9 +25,9 @@ import java.util.List;
*/
public class IndriAndScorer extends IndriDisjunctionScorer {
protected IndriAndScorer(Weight weight, List<Scorer> subScorers, ScoreMode scoreMode, float boost)
protected IndriAndScorer(List<Scorer> subScorers, ScoreMode scoreMode, float boost)
throws IOException {
super(weight, subScorers, scoreMode, boost);
super(subScorers, scoreMode, boost);
}
@Override

View File

@ -59,7 +59,7 @@ public class IndriAndWeight extends Weight {
}
Scorer scorer = subScorers.get(0);
if (subScorers.size() > 1) {
scorer = new IndriAndScorer(this, subScorers, scoreMode, boost);
scorer = new IndriAndScorer(subScorers, scoreMode, boost);
}
return scorer;
}

View File

@ -30,9 +30,8 @@ public abstract class IndriDisjunctionScorer extends IndriScorer {
private final DisiPriorityQueue subScorers;
private final DocIdSetIterator approximation;
protected IndriDisjunctionScorer(
Weight weight, List<Scorer> subScorersList, ScoreMode scoreMode, float boost) {
super(weight, boost);
protected IndriDisjunctionScorer(List<Scorer> subScorersList, ScoreMode scoreMode, float boost) {
super(boost);
this.subScorersList = subScorersList;
this.subScorers = new DisiPriorityQueue(subScorersList.size());
for (Scorer scorer : subScorersList) {

View File

@ -26,8 +26,7 @@ public abstract class IndriScorer extends Scorer {
private float boost;
protected IndriScorer(Weight weight, float boost) {
super(weight);
protected IndriScorer(float boost) {
this.boost = boost;
}

View File

@ -779,8 +779,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
disi = DocIdSetIterator.empty();
}
return new ConstantScoreScorer(
CachingWrapperWeight.this, 0f, ScoreMode.COMPLETE_NO_SCORES, disi);
return new ConstantScoreScorer(0f, ScoreMode.COMPLETE_NO_SCORES, disi);
}
@Override
@ -805,8 +804,7 @@ public class LRUQueryCache implements QueryCache, Accountable {
return new ScorerSupplier() {
@Override
public Scorer get(long LeadCost) throws IOException {
return new ConstantScoreScorer(
CachingWrapperWeight.this, 0f, ScoreMode.COMPLETE_NO_SCORES, disi);
return new ConstantScoreScorer(0f, ScoreMode.COMPLETE_NO_SCORES, disi);
}
@Override

View File

@ -33,13 +33,12 @@ public final class MatchAllDocsQuery extends Query {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
return new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
}
@Override

View File

@ -132,6 +132,6 @@ final class MultiTermQueryConstantScoreBlendedWrapper<Q extends MultiTermQuery>
private static Scorer wrapWithDummyScorer(Weight weight, DocIdSetIterator disi) {
// The score and score mode do not actually matter here, except that using TOP_SCORES results
// in another wrapper object getting created around the disi, so we try to avoid that:
return new ConstantScoreScorer(weight, 1f, ScoreMode.COMPLETE_NO_SCORES, disi);
return new ConstantScoreScorer(1f, ScoreMode.COMPLETE_NO_SCORES, disi);
}
}

View File

@ -32,8 +32,7 @@ class PhraseScorer extends Scorer {
private float minCompetitiveScore = 0;
private float freq = 0;
PhraseScorer(Weight weight, PhraseMatcher matcher, ScoreMode scoreMode, LeafSimScorer simScorer) {
super(weight);
PhraseScorer(PhraseMatcher matcher, ScoreMode scoreMode, LeafSimScorer simScorer) {
this.matcher = matcher;
this.scoreMode = scoreMode;
this.simScorer = simScorer;
@ -103,9 +102,4 @@ class PhraseScorer extends Scorer {
public float getMaxScore(int upTo) throws IOException {
return maxScoreCache.getMaxScore(upTo);
}
@Override
public String toString() {
return "PhraseScorer(" + weight + ")";
}
}

View File

@ -65,7 +65,7 @@ public abstract class PhraseWeight extends Weight {
if (matcher == null) return null;
LeafSimScorer simScorer =
new LeafSimScorer(stats, context.reader(), field, scoreMode.needsScores());
final var scorer = new PhraseScorer(this, matcher, scoreMode, simScorer);
final var scorer = new PhraseScorer(matcher, scoreMode, simScorer);
return new DefaultScorerSupplier(scorer);
}

View File

@ -145,7 +145,6 @@ public abstract class PointInSetQuery extends Query implements Accountable {
return new ConstantScoreWeight(this, boost) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
LeafReader reader = context.reader();
PointValues values = reader.getPointValues(field);
@ -184,7 +183,7 @@ public abstract class PointInSetQuery extends Query implements Accountable {
DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
values.intersect(new MergePointVisitor(sortedPackedPoints, result));
DocIdSetIterator iterator = result.build().iterator();
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override
@ -221,7 +220,7 @@ public abstract class PointInSetQuery extends Query implements Accountable {
visitor.setPoint(point);
values.intersect(visitor);
}
return new ConstantScoreScorer(weight, score(), scoreMode, result.build().iterator());
return new ConstantScoreScorer(score(), scoreMode, result.build().iterator());
}
@Override

View File

@ -341,14 +341,13 @@ public abstract class PointRangeQuery extends Query {
allDocsMatch = false;
}
final Weight weight = this;
if (allDocsMatch) {
// all docs have a value and all points are within bounds, so everything matches
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) {
return new ConstantScoreScorer(
weight, score(), scoreMode, DocIdSetIterator.all(reader.maxDoc()));
score(), scoreMode, DocIdSetIterator.all(reader.maxDoc()));
}
@Override
@ -376,12 +375,12 @@ public abstract class PointRangeQuery extends Query {
long[] cost = new long[] {reader.maxDoc()};
values.intersect(getInverseIntersectVisitor(result, cost));
final DocIdSetIterator iterator = new BitSetIterator(result, cost[0]);
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
values.intersect(visitor);
DocIdSetIterator iterator = result.build().iterator();
return new ConstantScoreScorer(weight, score(), scoreMode, iterator);
return new ConstantScoreScorer(score(), scoreMode, iterator);
}
@Override

View File

@ -40,7 +40,6 @@ class ReqExclScorer extends Scorer {
* @param exclScorer indicates exclusion.
*/
public ReqExclScorer(Scorer reqScorer, Scorer exclScorer) {
super(reqScorer.weight);
this.reqScorer = reqScorer;
reqTwoPhaseIterator = reqScorer.twoPhaseIterator();
if (reqTwoPhaseIterator == null) {

View File

@ -48,7 +48,6 @@ class ReqOptSumScorer extends Scorer {
*/
public ReqOptSumScorer(Scorer reqScorer, Scorer optScorer, ScoreMode scoreMode)
throws IOException {
super(reqScorer.weight);
assert reqScorer != null;
assert optScorer != null;
this.reqScorer = reqScorer;

View File

@ -17,7 +17,6 @@
package org.apache.lucene.search;
import java.io.IOException;
import java.util.Objects;
/**
* Expert: Common scoring functionality for different types of queries.
@ -27,27 +26,6 @@ import java.util.Objects;
*/
public abstract class Scorer extends Scorable {
/** the Scorer's parent Weight */
protected final Weight weight;
/**
* Constructs a Scorer
*
* @param weight The scorers <code>Weight</code>.
*/
protected Scorer(Weight weight) {
this.weight = Objects.requireNonNull(weight);
}
/**
* returns parent Weight
*
* @lucene.experimental
*/
public Weight getWeight() {
return weight;
}
/** Returns the doc ID that is currently being scored. */
public abstract int docID();

View File

@ -309,9 +309,9 @@ public final class SynonymQuery extends Query {
if (iterators.size() == 1) {
final TermScorer scorer;
if (scoreMode == ScoreMode.TOP_SCORES) {
scorer = new TermScorer(this, impacts.get(0), simScorer);
scorer = new TermScorer(impacts.get(0), simScorer);
} else {
scorer = new TermScorer(this, iterators.get(0), simScorer);
scorer = new TermScorer(iterators.get(0), simScorer);
}
float boost = termBoosts.get(0);
synonymScorer =
@ -324,7 +324,7 @@ public final class SynonymQuery extends Query {
DisiPriorityQueue queue = new DisiPriorityQueue(iterators.size());
for (int i = 0; i < iterators.size(); i++) {
PostingsEnum postings = iterators.get(i);
final TermScorer termScorer = new TermScorer(this, postings, simScorer);
final TermScorer termScorer = new TermScorer(postings, simScorer);
float boost = termBoosts.get(i);
final DisiWrapperFreq wrapper = new DisiWrapperFreq(termScorer, boost);
queue.add(wrapper);
@ -348,7 +348,7 @@ public final class SynonymQuery extends Query {
iterator = impactsDisi;
}
synonymScorer = new SynonymScorer(this, queue, iterator, impactsDisi, simScorer);
synonymScorer = new SynonymScorer(queue, iterator, impactsDisi, simScorer);
}
return new DefaultScorerSupplier(synonymScorer);
}
@ -536,12 +536,10 @@ public final class SynonymQuery extends Query {
private final LeafSimScorer simScorer;
SynonymScorer(
Weight weight,
DisiPriorityQueue queue,
DocIdSetIterator iterator,
ImpactsDISI impactsDisi,
LeafSimScorer simScorer) {
super(weight);
this.queue = queue;
this.iterator = iterator;
this.maxScoreCache = impactsDisi.getMaxScoreCache();

View File

@ -141,7 +141,6 @@ public class TermQuery extends Query {
topLevelScoringClause);
} else {
return new TermScorer(
TermWeight.this,
termsEnum.postings(
null, scoreMode.needsScores() ? PostingsEnum.FREQS : PostingsEnum.NONE),
scorer);

View File

@ -34,8 +34,7 @@ public final class TermScorer extends Scorer {
private final MaxScoreCache maxScoreCache;
/** Construct a {@link TermScorer} that will iterate all documents. */
public TermScorer(Weight weight, PostingsEnum postingsEnum, LeafSimScorer docScorer) {
super(weight);
public TermScorer(PostingsEnum postingsEnum, LeafSimScorer docScorer) {
iterator = this.postingsEnum = postingsEnum;
ImpactsEnum impactsEnum = new SlowImpactsEnum(postingsEnum);
maxScoreCache = new MaxScoreCache(impactsEnum, docScorer.getSimScorer());
@ -52,7 +51,6 @@ public final class TermScorer extends Scorer {
ImpactsEnum impactsEnum,
LeafSimScorer docScorer,
boolean topLevelScoringClause) {
super(weight);
postingsEnum = impactsEnum;
maxScoreCache = new MaxScoreCache(impactsEnum, docScorer.getSimScorer());
if (topLevelScoringClause) {
@ -107,10 +105,4 @@ public final class TermScorer extends Scorer {
impactsDisi.setMinCompetitiveScore(minScore);
}
}
/** Returns a string representation of this <code>TermScorer</code>. */
@Override
public String toString() {
return "scorer(" + weight + ")[" + super.toString() + "]";
}
}

View File

@ -150,9 +150,8 @@ final class WANDScorer extends Scorer {
final ScoreMode scoreMode;
WANDScorer(Weight weight, Collection<Scorer> scorers, int minShouldMatch, ScoreMode scoreMode)
WANDScorer(Collection<Scorer> scorers, int minShouldMatch, ScoreMode scoreMode)
throws IOException {
super(weight);
if (minShouldMatch >= scorers.size()) {
throw new IllegalArgumentException("minShouldMatch should be < the number of scorers");

View File

@ -1046,7 +1046,7 @@ abstract class BaseKnnVectorQueryTestCase extends LuceneTestCase {
throw new UnsupportedOperationException("reusing BitSet is not supported");
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, bitSetIterator);
final var scorer = new ConstantScoreScorer(score(), scoreMode, bitSetIterator);
return new DefaultScorerSupplier(scorer);
}

View File

@ -164,11 +164,6 @@ final class JustCompileSearch {
}
static final class JustCompileScorer extends Scorer {
protected JustCompileScorer(Weight weight) {
super(weight);
}
@Override
public float score() {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);

View File

@ -57,7 +57,6 @@ public class TestBoolean2ScorerSupplier extends LuceneTestCase {
private final DocIdSetIterator it;
FakeScorer(long cost) {
super(new FakeWeight());
this.it = DocIdSetIterator.all(Math.toIntExact(cost));
}

View File

@ -256,9 +256,9 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
"ConjunctionScorer\n"
+ " MUST ConstantScoreScorer\n"
+ " MUST WANDScorer\n"
+ " SHOULD TermScorer body:crawler\n"
+ " SHOULD TermScorer body:web\n"
+ " SHOULD TermScorer body:nutch",
+ " SHOULD TermScorer\n"
+ " SHOULD TermScorer\n"
+ " SHOULD TermScorer",
summary);
}
}
@ -272,7 +272,7 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
assertEquals(1, scoreSummary.numHits.get());
assertFalse(scoreSummary.summaries.isEmpty());
for (String summary : scoreSummary.summaries) {
assertEquals("TermScorer body:nutch", summary);
assertEquals("TermScorer", summary);
}
}
@ -329,14 +329,6 @@ public class TestBooleanQueryVisitSubscorers extends LuceneTestCase {
private static void summarizeScorer(
final StringBuilder builder, final Scorable scorer, final int indent) throws IOException {
builder.append(scorer.getClass().getSimpleName());
if (scorer instanceof TermScorer) {
TermQuery termQuery = (TermQuery) ((Scorer) scorer).getWeight().getQuery();
builder
.append(" ")
.append(termQuery.getTerm().field())
.append(":")
.append(termQuery.getTerm().text());
}
for (final Scorable.ChildScorable childScorer : scorer.getChildren()) {
indent(builder, indent + 1).append(childScorer.relationship).append(" ");
summarizeScorer(builder, childScorer.child, indent + 2);

View File

@ -23,7 +23,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.lucene.tests.util.TestUtil;
import org.apache.lucene.util.BitDocIdSet;
@ -86,28 +85,6 @@ public class TestConjunctionDISI extends LuceneTestCase {
return scorer(TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator), twoPhaseIterator);
}
private static class FakeWeight extends Weight {
protected FakeWeight() {
super(new MatchNoDocsQuery());
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return null;
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
return null;
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false;
}
}
/**
* Create a {@link Scorer} that wraps the given {@link DocIdSetIterator}. It also accepts a {@link
* TwoPhaseIterator} view, which is exposed in {@link Scorer#twoPhaseIterator()}. When the
@ -116,7 +93,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
* ConjunctionDISI} takes advantage of the {@link TwoPhaseIterator} view.
*/
private static Scorer scorer(DocIdSetIterator it, TwoPhaseIterator twoPhaseIterator) {
return new Scorer(new FakeWeight()) {
return new Scorer() {
@Override
public DocIdSetIterator iterator() {
@ -237,17 +214,13 @@ public class TestConjunctionDISI extends LuceneTestCase {
sets[i] = set;
iterators[i] =
new ConstantScoreScorer(
new FakeWeight(),
0f,
ScoreMode.TOP_SCORES,
anonymizeIterator(new BitDocIdSet(set).iterator()));
0f, ScoreMode.TOP_SCORES, anonymizeIterator(new BitDocIdSet(set).iterator()));
break;
case 1:
// bitSet iterator
sets[i] = set;
iterators[i] =
new ConstantScoreScorer(
new FakeWeight(), 0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator());
new ConstantScoreScorer(0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator());
break;
default:
// scorer with approximation
@ -282,10 +255,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
sets[i] = set;
iterators[i] =
new ConstantScoreScorer(
new FakeWeight(),
0f,
ScoreMode.COMPLETE_NO_SCORES,
new BitDocIdSet(set).iterator());
0f, ScoreMode.COMPLETE_NO_SCORES, new BitDocIdSet(set).iterator());
} else {
// scorer with approximation
final FixedBitSet confirmed = clearRandomBits(set);
@ -328,17 +298,13 @@ public class TestConjunctionDISI extends LuceneTestCase {
sets[i] = set;
newIterator =
new ConstantScoreScorer(
new FakeWeight(),
0f,
ScoreMode.TOP_SCORES,
anonymizeIterator(new BitDocIdSet(set).iterator()));
0f, ScoreMode.TOP_SCORES, anonymizeIterator(new BitDocIdSet(set).iterator()));
break;
case 1:
// bitSet iterator
sets[i] = set;
newIterator =
new ConstantScoreScorer(
new FakeWeight(), 0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator());
new ConstantScoreScorer(0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator());
break;
default:
// scorer with approximation
@ -384,8 +350,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
// simple iterator
sets[i] = set;
scorers.add(
new ConstantScoreScorer(
new FakeWeight(), 0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator()));
new ConstantScoreScorer(0f, ScoreMode.TOP_SCORES, new BitDocIdSet(set).iterator()));
} else {
// scorer with approximation
final FixedBitSet confirmed = clearRandomBits(set);
@ -404,15 +369,11 @@ public class TestConjunctionDISI extends LuceneTestCase {
List<Scorer> subIterators = scorers.subList(subSeqStart, subSeqEnd);
Scorer subConjunction;
if (wrapWithScorer) {
subConjunction =
new ConjunctionScorer(new FakeWeight(), subIterators, Collections.emptyList());
subConjunction = new ConjunctionScorer(subIterators, Collections.emptyList());
} else {
subConjunction =
new ConstantScoreScorer(
new FakeWeight(),
0f,
ScoreMode.TOP_SCORES,
ConjunctionUtils.intersectScorers(subIterators));
0f, ScoreMode.TOP_SCORES, ConjunctionUtils.intersectScorers(subIterators));
}
scorers.set(subSeqStart, subConjunction);
int toRemove = subSeqEnd - subSeqStart - 1;
@ -423,8 +384,7 @@ public class TestConjunctionDISI extends LuceneTestCase {
if (scorers.size() == 1) {
// ConjunctionDISI needs two iterators
scorers.add(
new ConstantScoreScorer(
new FakeWeight(), 0f, ScoreMode.TOP_SCORES, DocIdSetIterator.all(maxDoc)));
new ConstantScoreScorer(0f, ScoreMode.TOP_SCORES, DocIdSetIterator.all(maxDoc)));
}
final DocIdSetIterator conjunction = ConjunctionUtils.intersectScorers(scorers);

View File

@ -19,6 +19,7 @@ package org.apache.lucene.search;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.analysis.Analyzer;
@ -147,15 +148,14 @@ public class TestConjunctions extends LuceneTestCase {
private final AtomicBoolean setScorerCalled = new AtomicBoolean(false);
@Override
public void setScorer(Scorable s) throws IOException {
Collection<Scorer.ChildScorable> childScorers = s.getChildren();
setScorerCalled.set(true);
assertEquals(2, childScorers.size());
public void setWeight(Weight weight) {
BooleanQuery query = (BooleanQuery) weight.getQuery();
List<BooleanClause> clauseList = query.clauses();
assertEquals(2, clauseList.size());
Set<String> terms = new HashSet<>();
for (Scorer.ChildScorable childScorer : childScorers) {
Query query = ((Scorer) childScorer.child).getWeight().getQuery();
assertTrue(query instanceof TermQuery);
Term term = ((TermQuery) query).getTerm();
for (BooleanClause clause : clauseList) {
assert (clause.query() instanceof TermQuery);
Term term = ((TermQuery) clause.query()).getTerm();
assertEquals("field", term.field());
terms.add(term.text());
}
@ -164,6 +164,13 @@ public class TestConjunctions extends LuceneTestCase {
assertTrue(terms.contains("b"));
}
@Override
public void setScorer(Scorable s) throws IOException {
Collection<Scorer.ChildScorable> childScorers = s.getChildren();
setScorerCalled.set(true);
assertEquals(2, childScorers.size());
}
@Override
public void collect(int doc) {}

View File

@ -205,10 +205,9 @@ public class TestConstantScoreScorer extends LuceneTestCase {
Scorer scorer = weight.scorer(context);
if (scorer.twoPhaseIterator() == null) {
return new ConstantScoreScorer(scorer.getWeight(), score, scoreMode, scorer.iterator());
return new ConstantScoreScorer(score, scoreMode, scorer.iterator());
} else {
return new ConstantScoreScorer(
scorer.getWeight(), score, scoreMode, scorer.twoPhaseIterator());
return new ConstantScoreScorer(score, scoreMode, scorer.twoPhaseIterator());
}
}

View File

@ -125,7 +125,7 @@ public class TestDisiPriorityQueue extends LuceneTestCase {
return new ConstantScoreWeight(this, boost) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, disi);
final var scorer = new ConstantScoreScorer(score(), scoreMode, disi);
return new DefaultScorerSupplier(scorer);
}

View File

@ -20,40 +20,15 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.tests.util.LuceneTestCase;
public class TestDisjunctionScoreBlockBoundaryPropagator extends LuceneTestCase {
private static class FakeWeight extends Weight {
FakeWeight() {
super(new MatchNoDocsQuery());
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
return null;
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
return null;
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false;
}
}
private static class FakeScorer extends Scorer {
final int boundary;
final float maxScore;
FakeScorer(int boundary, float maxScore) throws IOException {
super(new FakeWeight());
this.boundary = boundary;
this.maxScore = maxScore;
}

View File

@ -1629,8 +1629,7 @@ public class TestLRUQueryCache extends LuceneTestCase {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final var scorer =
new ConstantScoreScorer(this, boost, scoreMode, DocIdSetIterator.all(1));
final var scorer = new ConstantScoreScorer(boost, scoreMode, DocIdSetIterator.all(1));
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
@ -1722,13 +1721,12 @@ public class TestLRUQueryCache extends LuceneTestCase {
return new ConstantScoreWeight(this, 1) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final Weight weight = this;
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
scorerCreatedCount.incrementAndGet();
return new ConstantScoreScorer(
weight, 1, scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
1, scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
}
@Override

View File

@ -372,38 +372,6 @@ public class TestMaxScoreBulkScorer extends LuceneTestCase {
}
}
private static class FakeWeight extends Weight {
protected FakeWeight() {
super(null);
}
@Override
public boolean isCacheable(LeafReaderContext ctx) {
return false;
}
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) {
return new ScorerSupplier() {
@Override
public Scorer get(long leadCost) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
};
}
}
private static class FakeScorer extends Scorer {
final String toString;
@ -413,7 +381,6 @@ public class TestMaxScoreBulkScorer extends LuceneTestCase {
int cost = 10;
protected FakeScorer(String toString) {
super(new FakeWeight());
this.toString = toString;
}

View File

@ -135,7 +135,7 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
}
return null;
}
return new BulkScorerWrapperScorer(weight, bulkScorer, TestUtil.nextInt(random(), 1, 100));
return new BulkScorerWrapperScorer(bulkScorer, TestUtil.nextInt(random(), 1, 100));
default:
throw new AssertionError();
}
@ -351,7 +351,6 @@ public class TestMinShouldMatch2 extends LuceneTestCase {
SlowMinShouldMatchScorer(BooleanWeight weight, LeafReader reader, IndexSearcher searcher)
throws IOException {
super(weight);
this.dv = reader.getSortedSetDocValues("dv");
this.maxDoc = reader.maxDoc();
BooleanQuery bq = (BooleanQuery) weight.getQuery();

View File

@ -19,7 +19,6 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.tests.util.LuceneTestCase;
@ -29,10 +28,6 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
private static final class SimpleScorer extends Scorer {
private int idx = -1;
public SimpleScorer(Weight weight) {
super(weight);
}
@Override
public float score() {
return idx == scores.length ? Float.NaN : scores[idx];
@ -113,10 +108,7 @@ public class TestPositiveScoresOnlyCollector extends LuceneTestCase {
writer.commit();
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake =
new TermQuery(new Term("fake", "weight")).createWeight(searcher, ScoreMode.COMPLETE, 1f);
Scorer s = new SimpleScorer(fake);
Scorer s = new SimpleScorer();
TopDocsCollector<ScoreDoc> tdc = TopScoreDocCollector.create(scores.length, Integer.MAX_VALUE);
Collector c = new PositiveScoresOnlyCollector(tdc);
LeafCollector ac = c.getLeafCollector(ir.leaves().get(0));

View File

@ -495,7 +495,7 @@ public class TestQueryRescorer extends LuceneTestCase {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final var scorer =
new Scorer(this) {
new Scorer() {
int docID = -1;
@Override

View File

@ -19,7 +19,6 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.tests.util.LuceneTestCase;
@ -30,10 +29,6 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
private int idx = 0;
private int doc = -1;
public SimpleScorer(Weight weight) {
super(weight);
}
@Override
public float score() {
// advance idx on purpose, so that consecutive calls to score will get
@ -145,10 +140,7 @@ public class TestScoreCachingWrappingScorer extends LuceneTestCase {
writer.commit();
IndexReader ir = writer.getReader();
writer.close();
IndexSearcher searcher = newSearcher(ir);
Weight fake =
new TermQuery(new Term("fake", "weight")).createWeight(searcher, ScoreMode.COMPLETE, 1f);
Scorer s = new SimpleScorer(fake);
Scorer s = new SimpleScorer();
ScoreCachingCollector scc = new ScoreCachingCollector(scores.length);
LeafCollector lc = scc.getLeafCollector(null);
lc.setScorer(s);

View File

@ -170,10 +170,7 @@ public class TestScorerPerf extends LuceneTestCase {
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final var scorer =
new ConstantScoreScorer(
this,
score(),
scoreMode,
new BitSetIterator(docs, docs.approximateCardinality()));
score(), scoreMode, new BitSetIterator(docs, docs.approximateCardinality()));
return new DefaultScorerSupplier(scorer);
}

View File

@ -268,10 +268,7 @@ public class TestSortRandom extends LuceneTestCase {
final var scorer =
new ConstantScoreScorer(
this,
score(),
scoreMode,
new BitSetIterator(bits, bits.approximateCardinality()));
score(), scoreMode, new BitSetIterator(bits, bits.approximateCardinality()));
return new DefaultScorerSupplier(scorer);
}

View File

@ -1,270 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.search;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.analysis.MockAnalyzer;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.tests.search.AssertingScorable;
import org.apache.lucene.tests.search.DisablingBulkScorerQuery;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestSubScorerFreqs extends LuceneTestCase {
private static Directory dir;
private static IndexSearcher s;
@BeforeClass
public static void makeIndex() throws Exception {
dir = new ByteBuffersDirectory();
RandomIndexWriter w =
new RandomIndexWriter(
random(),
dir,
newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()));
// make sure we have more than one segment occationally
int num = atLeast(31);
for (int i = 0; i < num; i++) {
Document doc = new Document();
doc.add(newTextField("f", "a b c d b c d c d d", Field.Store.NO));
w.addDocument(doc);
doc = new Document();
doc.add(newTextField("f", "a b c d", Field.Store.NO));
w.addDocument(doc);
}
s = newSearcher(w.getReader());
s.setSimilarity(new CountingSimilarity());
w.close();
}
@AfterClass
public static void finish() throws Exception {
s.getIndexReader().close();
s = null;
dir.close();
dir = null;
}
private static class CountingCollectorManager
implements CollectorManager<CountingCollector, Map<Integer, Map<Query, Float>>> {
private final Set<String> relationships;
CountingCollectorManager(Set<String> relationships) {
this.relationships = relationships;
}
@Override
public CountingCollector newCollector() {
TopScoreDocCollector topScoreDocCollector =
TopScoreDocCollector.create(10, Integer.MAX_VALUE);
return relationships == null
? new CountingCollector(topScoreDocCollector)
: new CountingCollector(topScoreDocCollector, relationships);
}
@Override
public Map<Integer, Map<Query, Float>> reduce(Collection<CountingCollector> collectors) {
Map<Integer, Map<Query, Float>> docCounts = new HashMap<>();
for (CountingCollector collector : collectors) {
docCounts.putAll(collector.docCounts);
}
return docCounts;
}
}
private static class CountingCollector extends FilterCollector {
public final Map<Integer, Map<Query, Float>> docCounts = new HashMap<>();
private final Map<Query, Scorer> subScorers = new HashMap<>();
private final Set<String> relationships;
public CountingCollector(Collector other) {
this(other, new HashSet<>(Arrays.asList("MUST", "SHOULD", "MUST_NOT")));
}
public CountingCollector(Collector other, Set<String> relationships) {
super(other);
this.relationships = relationships;
}
public void setSubScorers(Scorable scorer) throws IOException {
scorer = AssertingScorable.unwrap(scorer);
for (Scorable.ChildScorable child : scorer.getChildren()) {
if (relationships.contains(child.relationship)) {
setSubScorers(child.child);
}
}
subScorers.put(((Scorer) scorer).getWeight().getQuery(), (Scorer) scorer);
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
final int docBase = context.docBase;
return new FilterLeafCollector(super.getLeafCollector(context)) {
@Override
public void collect(int doc) throws IOException {
final Map<Query, Float> freqs = new HashMap<Query, Float>();
for (Map.Entry<Query, Scorer> ent : subScorers.entrySet()) {
Scorer value = ent.getValue();
int matchId = value.docID();
freqs.put(ent.getKey(), matchId == doc ? value.score() : 0.0f);
}
docCounts.put(doc + docBase, freqs);
super.collect(doc);
}
@Override
public void setScorer(Scorable scorer) throws IOException {
super.setScorer(scorer);
subScorers.clear();
setSubScorers(scorer);
}
};
}
}
private static final float FLOAT_TOLERANCE = 0.00001F;
@Test
public void testTermQuery() throws Exception {
TermQuery q = new TermQuery(new Term("f", "d"));
Map<Integer, Map<Query, Float>> docCounts = s.search(q, new CountingCollectorManager(null));
final int maxDocs = s.getIndexReader().maxDoc();
assertEquals(maxDocs, docCounts.size());
for (int i = 0; i < maxDocs; i++) {
Map<Query, Float> doc0 = docCounts.get(i);
assertEquals(1, doc0.size());
assertEquals(4.0F, doc0.get(q), FLOAT_TOLERANCE);
Map<Query, Float> doc1 = docCounts.get(++i);
assertEquals(1, doc1.size());
assertEquals(1.0F, doc1.get(q), FLOAT_TOLERANCE);
}
}
@Test
public void testBooleanQuery() throws Exception {
  TermQuery aQuery = new TermQuery(new Term("f", "a"));
  TermQuery dQuery = new TermQuery(new Term("f", "d"));
  TermQuery cQuery = new TermQuery(new Term("f", "c"));
  TermQuery yQuery = new TermQuery(new Term("f", "y"));

  // (c SHOULD, y MUST_NOT) MUST, a MUST, d MUST
  BooleanQuery.Builder inner = new BooleanQuery.Builder();
  inner.add(cQuery, Occur.SHOULD);
  inner.add(yQuery, Occur.MUST_NOT);
  BooleanQuery.Builder query = new BooleanQuery.Builder();
  query.add(inner.build(), Occur.MUST);
  query.add(aQuery, Occur.MUST);
  query.add(dQuery, Occur.MUST);

  // Generic varargs: suppress the unchecked array-creation warning from Arrays#asList.
  @SuppressWarnings("unchecked")
  final Iterable<Set<String>> occurList =
      Arrays.asList(
          Collections.singleton("MUST"), new HashSet<>(Arrays.asList("MUST", "SHOULD")));

  for (final Set<String> occur : occurList) {
    Map<Integer, Map<Query, Float>> docCounts =
        s.search(
            new DisablingBulkScorerQuery(query.build()), new CountingCollectorManager(occur));
    final int maxDocs = s.getIndexReader().maxDoc();
    assertEquals(maxDocs, docCounts.size());
    boolean includeOptional = occur.contains("SHOULD");
    for (int i = 0; i < maxDocs; i++) {
      Map<Query, Float> doc0 = docCounts.get(i);
      // "y" doesn't exist in the index, so its scorer is never part of the scorer tree.
      assertEquals(4, doc0.size());
      assertEquals(1.0F, doc0.get(aQuery), FLOAT_TOLERANCE);
      assertEquals(4.0F, doc0.get(dQuery), FLOAT_TOLERANCE);
      if (includeOptional) {
        assertEquals(3.0F, doc0.get(cQuery), FLOAT_TOLERANCE);
      }
      Map<Query, Float> doc1 = docCounts.get(++i);
      // "y" doesn't exist in the index, so its scorer is never part of the scorer tree.
      assertEquals(4, doc1.size());
      assertEquals(1.0F, doc1.get(aQuery), FLOAT_TOLERANCE);
      assertEquals(1.0F, doc1.get(dQuery), FLOAT_TOLERANCE);
      if (includeOptional) {
        assertEquals(1.0F, doc1.get(cQuery), FLOAT_TOLERANCE);
      }
    }
  }
}
@Test
public void testPhraseQuery() throws Exception {
  PhraseQuery q = new PhraseQuery("f", "b", "c");
  Map<Integer, Map<Query, Float>> docCounts = s.search(q, new CountingCollectorManager(null));
  final int maxDocs = s.getIndexReader().maxDoc();
  assertEquals(maxDocs, docCounts.size());
  // Documents come in pairs: the first of each pair matches the phrase twice, the second once.
  for (int doc = 0; doc < maxDocs; doc++) {
    Map<Query, Float> first = docCounts.get(doc);
    assertEquals(1, first.size());
    assertEquals(2.0F, first.get(q), FLOAT_TOLERANCE);
    Map<Query, Float> second = docCounts.get(++doc);
    assertEquals(1, second.size());
    assertEquals(1.0F, second.get(q), FLOAT_TOLERANCE);
  }
}
/** Similarity whose score is simply the raw term frequency, making counts easy to assert. */
private static class CountingSimilarity extends Similarity {
  @Override
  public long computeNorm(FieldInvertState state) {
    // Norms are irrelevant to these tests; use a constant.
    return 1;
  }

  @Override
  public SimScorer scorer(
      float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
    SimScorer freqAsScore =
        new SimScorer() {
          @Override
          public float score(float freq, long norm) {
            // Expose the frequency directly as the score.
            return freq;
          }
        };
    return freqAsScore;
  }
}
}

View File

@ -134,8 +134,7 @@ public class TestUsageTrackingFilterCachingPolicy extends LuceneTestCase {
return new ConstantScoreWeight(DummyQuery.this, boost) {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final var scorer =
new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(1));
final var scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(1));
return new DefaultScorerSupplier(scorer);
}

View File

@ -1024,8 +1024,7 @@ public class TestWANDScorer extends LuceneTestCase {
final Scorer scorer;
if (optionalScorers.size() > 0) {
scorer =
new WANDScorer(
weight, optionalScorers, query.getMinimumNumberShouldMatch(), scoreMode);
new WANDScorer(optionalScorers, query.getMinimumNumberShouldMatch(), scoreMode);
} else {
scorer = weight.scorer(context);
if (scorer == null) return null;

View File

@ -180,8 +180,7 @@ class DrillSidewaysQuery extends Query {
Scorer scorer = drillDowns[dim].scorer(context);
if (scorer == null) {
nullCount++;
scorer =
new ConstantScoreScorer(drillDowns[dim], 0f, scoreMode, DocIdSetIterator.empty());
scorer = new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty());
}
FacetsCollector sidewaysCollector = drillSidewaysCollectorManagers[dim].newCollector();

View File

@ -202,7 +202,7 @@ public final class DoubleRange extends Range {
return 100; // TODO: use cost of range.accept()
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
final var scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase);
return new DefaultScorerSupplier(scorer);
}
@ -309,7 +309,7 @@ public final class DoubleRange extends Range {
return 100; // TODO: use cost of range.accept()
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
final var scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase);
return new DefaultScorerSupplier(scorer);
}

View File

@ -189,7 +189,7 @@ public final class LongRange extends Range {
return 100; // TODO: use cost of range.accept()
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
final var scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase);
return new DefaultScorerSupplier(scorer);
}
@ -296,7 +296,7 @@ public final class LongRange extends Range {
return 100; // TODO: use cost of range.accept()
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
final var scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase);
return new DefaultScorerSupplier(scorer);
}

View File

@ -1314,7 +1314,6 @@ public class TestDrillSideways extends FacetTestCase {
DocIdSetIterator.all(context.reader().maxDoc());
final var scorer =
new ConstantScoreScorer(
this,
score(),
scoreMode,
new TwoPhaseIterator(approximation) {

View File

@ -21,7 +21,6 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
abstract class BaseGlobalOrdinalScorer extends Scorer {
@ -32,8 +31,7 @@ abstract class BaseGlobalOrdinalScorer extends Scorer {
float score;
public BaseGlobalOrdinalScorer(
Weight weight, SortedDocValues values, DocIdSetIterator approximationScorer, float boost) {
super(weight);
SortedDocValues values, DocIdSetIterator approximationScorer, float boost) {
this.values = values;
this.approximation = approximationScorer;
this.boost = boost;

View File

@ -184,7 +184,6 @@ final class GlobalOrdinalsQuery extends Query implements Accountable {
if (globalOrds != null) {
scorer =
new OrdinalMapScorer(
this,
score(),
foundOrds,
values,
@ -192,8 +191,7 @@ final class GlobalOrdinalsQuery extends Query implements Accountable {
globalOrds.getGlobalOrds(context.ord));
} else {
scorer =
new SegmentOrdinalScorer(
this, score(), foundOrds, values, approximationScorer.iterator());
new SegmentOrdinalScorer(score(), foundOrds, values, approximationScorer.iterator());
}
return new DefaultScorerSupplier(scorer);
}
@ -213,13 +211,12 @@ final class GlobalOrdinalsQuery extends Query implements Accountable {
final LongValues segmentOrdToGlobalOrdLookup;
public OrdinalMapScorer(
Weight weight,
float score,
LongBitSet foundOrds,
SortedDocValues values,
DocIdSetIterator approximationScorer,
LongValues segmentOrdToGlobalOrdLookup) {
super(weight, values, approximationScorer, 1);
super(values, approximationScorer, 1);
this.score = score;
this.foundOrds = foundOrds;
this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
@ -254,12 +251,11 @@ final class GlobalOrdinalsQuery extends Query implements Accountable {
final LongBitSet foundOrds;
public SegmentOrdinalScorer(
Weight weight,
float score,
LongBitSet foundOrds,
SortedDocValues values,
DocIdSetIterator approximationScorer) {
super(weight, values, approximationScorer, 1);
super(values, approximationScorer, 1);
this.score = score;
this.foundOrds = foundOrds;
}

View File

@ -222,16 +222,13 @@ final class GlobalOrdinalsWithScoreQuery extends Query implements Accountable {
} else if (globalOrds != null) {
scorer =
new OrdinalMapScorer(
this,
collector,
boost,
values,
approximationScorer.iterator(),
globalOrds.getGlobalOrds(context.ord));
} else {
scorer =
new SegmentOrdinalScorer(
this, collector, values, boost, approximationScorer.iterator());
scorer = new SegmentOrdinalScorer(collector, values, boost, approximationScorer.iterator());
}
return new DefaultScorerSupplier(scorer);
}
@ -251,13 +248,12 @@ final class GlobalOrdinalsWithScoreQuery extends Query implements Accountable {
final GlobalOrdinalsWithScoreCollector collector;
public OrdinalMapScorer(
Weight weight,
GlobalOrdinalsWithScoreCollector collector,
float boost,
SortedDocValues values,
DocIdSetIterator approximation,
LongValues segmentOrdToGlobalOrdLookup) {
super(weight, values, approximation, boost);
super(values, approximation, boost);
this.segmentOrdToGlobalOrdLookup = segmentOrdToGlobalOrdLookup;
this.collector = collector;
}
@ -292,12 +288,11 @@ final class GlobalOrdinalsWithScoreQuery extends Query implements Accountable {
final GlobalOrdinalsWithScoreCollector collector;
public SegmentOrdinalScorer(
Weight weight,
GlobalOrdinalsWithScoreCollector collector,
SortedDocValues values,
float boost,
DocIdSetIterator approximation) {
super(weight, values, approximation, boost);
super(values, approximation, boost);
this.collector = collector;
}

View File

@ -177,7 +177,7 @@ public class ParentChildrenBlockJoinQuery extends Query {
}
};
final var scorer =
new Scorer(this) {
new Scorer() {
@Override
public int docID() {
return it.docID();

View File

@ -204,7 +204,7 @@ abstract class PointInSetIncludingScoreQuery extends Query implements Accountabl
float[] scores = new float[reader.maxDoc()];
values.intersect(new MergePointVisitor(sortedPackedPoints, result, scores));
final var scorer =
new Scorer(this) {
new Scorer() {
DocIdSetIterator disi = new BitSetIterator(result, 10L);

View File

@ -183,11 +183,9 @@ class TermsIncludingScoreQuery extends Query implements Accountable {
TermsEnum segmentTermsEnum = terms.iterator();
if (multipleValuesPerDocument) {
scorer =
new MVInOrderScorer(this, segmentTermsEnum, context.reader().maxDoc(), cost, boost);
scorer = new MVInOrderScorer(segmentTermsEnum, context.reader().maxDoc(), cost, boost);
} else {
scorer =
new SVInOrderScorer(this, segmentTermsEnum, context.reader().maxDoc(), cost, boost);
scorer = new SVInOrderScorer(segmentTermsEnum, context.reader().maxDoc(), cost, boost);
}
return new DefaultScorerSupplier(scorer);
}
@ -206,9 +204,7 @@ class TermsIncludingScoreQuery extends Query implements Accountable {
final long cost;
final float boost;
SVInOrderScorer(Weight weight, TermsEnum termsEnum, int maxDoc, long cost, float boost)
throws IOException {
super(weight);
SVInOrderScorer(TermsEnum termsEnum, int maxDoc, long cost, float boost) throws IOException {
FixedBitSet matchingDocs = new FixedBitSet(maxDoc);
this.scores = new float[maxDoc];
fillDocsAndScores(matchingDocs, termsEnum);
@ -263,9 +259,8 @@ class TermsIncludingScoreQuery extends Query implements Accountable {
// related documents.
class MVInOrderScorer extends SVInOrderScorer {
MVInOrderScorer(Weight weight, TermsEnum termsEnum, int maxDoc, long cost, float boost)
throws IOException {
super(weight, termsEnum, maxDoc, cost, boost);
MVInOrderScorer(TermsEnum termsEnum, int maxDoc, long cost, float boost) throws IOException {
super(termsEnum, maxDoc, cost, boost);
}
@Override

View File

@ -118,7 +118,7 @@ public class ToChildBlockJoinQuery extends Query {
return null;
}
final var scorer = new ToChildBlockJoinScorer(this, parentScorer, parents, doScores);
final var scorer = new ToChildBlockJoinScorer(parentScorer, parents, doScores);
return new DefaultScorerSupplier(scorer);
}
@ -148,9 +148,7 @@ public class ToChildBlockJoinQuery extends Query {
private int childDoc = -1;
private int parentDoc = 0;
public ToChildBlockJoinScorer(
Weight weight, Scorer parentScorer, BitSet parentBits, boolean doScores) {
super(weight);
public ToChildBlockJoinScorer(Scorer parentScorer, BitSet parentBits, boolean doScores) {
this.doScores = doScores;
this.parentBits = parentBits;
this.parentScorer = parentScorer;

View File

@ -148,8 +148,7 @@ public class ToParentBlockJoinQuery extends Query {
@Override
public Scorer get(long leadCost) throws IOException {
return new BlockJoinScorer(
BlockJoinWeight.this, childScorerSupplier.get(leadCost), parents, scoreMode);
return new BlockJoinScorer(childScorerSupplier.get(leadCost), parents, scoreMode);
}
@Override
@ -274,9 +273,7 @@ public class ToParentBlockJoinQuery extends Query {
private final ParentTwoPhase parentTwoPhase;
private float score;
public BlockJoinScorer(
Weight weight, Scorer childScorer, BitSet parentBits, ScoreMode scoreMode) {
super(weight);
public BlockJoinScorer(Scorer childScorer, BitSet parentBits, ScoreMode scoreMode) {
// System.out.println("Q.init firstChildDoc=" + firstChildDoc);
this.parentBits = parentBits;
this.childScorer = childScorer;

View File

@ -586,7 +586,7 @@ public class TestJoinUtil extends LuceneTestCase {
}
NumericDocValues price = context.reader().getNumericDocValues(field);
final var scorer =
new FilterScorer(fieldScorer, this) {
new FilterScorer(fieldScorer) {
@Override
public float score() throws IOException {
assertEquals(in.docID(), price.advance(in.docID()));

View File

@ -534,7 +534,7 @@ public class TestDiversifiedTopDocsCollector extends LuceneTestCase {
final var innerScorer = scorerSupplier.get(Long.MAX_VALUE);
NumericDocValues scoreFactors = DocValues.getNumeric(context.reader(), scoreField);
final var scorer =
new Scorer(this) {
new Scorer() {
@Override
public float score() throws IOException {

View File

@ -104,7 +104,7 @@ public final class FunctionMatchQuery extends Query {
return matchCost; // TODO maybe DoubleValuesSource should have a matchCost?
}
};
final var scorer = new ConstantScoreScorer(this, score(), scoreMode, twoPhase);
final var scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase);
return new DefaultScorerSupplier(scorer);
}

View File

@ -108,7 +108,6 @@ public class FunctionQuery extends Query {
final FunctionValues vals;
public AllScorer(LeafReaderContext context, FunctionWeight w, float boost) throws IOException {
super(w);
this.weight = w;
this.boost = boost;
this.reader = context.reader();

View File

@ -32,7 +32,7 @@ import org.apache.lucene.search.Weight;
* A Query wrapping a {@link ValueSource} that matches docs in which the values in the value source
* match a configured range. The score is the float value. This can be a slow query if run by itself
* since it must visit all docs; ideally it's combined with other queries. It's mostly a wrapper
* around {@link FunctionValues#getRangeScorer(Weight, LeafReaderContext, String, String, boolean,
* around {@link FunctionValues#getRangeScorer(LeafReaderContext, String, String, boolean,
* boolean)}.
*
* <p>A similar class is {@code org.apache.lucene.search.DocValuesRangeQuery} in the sandbox module.
@ -169,8 +169,7 @@ public class FunctionRangeQuery extends Query {
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
FunctionValues functionValues = valueSource.getValues(vsContext, context);
final var scorer =
functionValues.getRangeScorer(
this, context, lowerVal, upperVal, includeLower, includeUpper);
functionValues.getRangeScorer(context, lowerVal, upperVal, includeLower, includeUpper);
return new DefaultScorerSupplier(scorer);
}

View File

@ -20,7 +20,6 @@ import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueFloat;
@ -204,8 +203,8 @@ public abstract class FunctionValues {
* Yields a {@link Scorer} that matches all documents, and that which produces scores equal to
* {@link #floatVal(int)}.
*/
public ValueSourceScorer getScorer(Weight weight, LeafReaderContext readerContext) {
return new ValueSourceScorer(weight, readerContext, this) {
public ValueSourceScorer getScorer(LeafReaderContext readerContext) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) {
return true;
@ -227,7 +226,6 @@ public abstract class FunctionValues {
// a setup cost - parsing and normalizing params, and doing a binary search on the StringIndex.
// TODO: change "reader" to LeafReaderContext
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -252,7 +250,7 @@ public abstract class FunctionValues {
final float u = upper;
if (includeLower && includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -261,7 +259,7 @@ public abstract class FunctionValues {
}
};
} else if (includeLower && !includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -270,7 +268,7 @@ public abstract class FunctionValues {
}
};
} else if (!includeLower && includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -279,7 +277,7 @@ public abstract class FunctionValues {
}
};
} else {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -21,7 +21,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
/**
* {@link Scorer} which returns the result of {@link FunctionValues#floatVal(int)} as the score for
@ -43,9 +42,7 @@ public abstract class ValueSourceScorer extends Scorer {
private final TwoPhaseIterator twoPhaseIterator;
private final DocIdSetIterator disi;
protected ValueSourceScorer(
Weight weight, LeafReaderContext readerContext, FunctionValues values) {
super(weight);
protected ValueSourceScorer(LeafReaderContext readerContext, FunctionValues values) {
this.values = values;
final DocIdSetIterator approximation =
DocIdSetIterator.all(readerContext.reader().maxDoc()); // no approximation!

View File

@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
@ -117,7 +116,6 @@ public abstract class DocTermsIndexDocValues extends FunctionValues {
@Override
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -151,7 +149,7 @@ public abstract class DocTermsIndexDocValues extends FunctionValues {
final int ll = lower;
final int uu = upper;
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -21,7 +21,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueDouble;
@ -86,7 +85,6 @@ public abstract class DoubleDocValues extends FunctionValues {
@Override
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -110,7 +108,7 @@ public abstract class DoubleDocValues extends FunctionValues {
final double u = upper;
if (includeLower && includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -119,7 +117,7 @@ public abstract class DoubleDocValues extends FunctionValues {
}
};
} else if (includeLower && !includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -128,7 +126,7 @@ public abstract class DoubleDocValues extends FunctionValues {
}
};
} else if (!includeLower && includeUpper) {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;
@ -137,7 +135,7 @@ public abstract class DoubleDocValues extends FunctionValues {
}
};
} else {
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -21,7 +21,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueInt;
@ -81,7 +80,6 @@ public abstract class IntDocValues extends FunctionValues {
@Override
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -108,7 +106,7 @@ public abstract class IntDocValues extends FunctionValues {
final int ll = lower;
final int uu = upper;
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -21,7 +21,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.mutable.MutableValue;
import org.apache.lucene.util.mutable.MutableValueLong;
@ -90,7 +89,6 @@ public abstract class LongDocValues extends FunctionValues {
@Override
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -117,7 +115,7 @@ public abstract class LongDocValues extends FunctionValues {
final long ll = lower;
final long uu = upper;
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -24,7 +24,6 @@ import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSourceScorer;
import org.apache.lucene.queries.function.docvalues.IntDocValues;
import org.apache.lucene.search.Weight;
/**
* Obtains int field values from {@link org.apache.lucene.index.LeafReader#getNumericDocValues} and
@ -135,7 +134,6 @@ public class EnumFieldSource extends FieldCacheSource {
@Override
public ValueSourceScorer getRangeScorer(
Weight weight,
LeafReaderContext readerContext,
String lowerVal,
String upperVal,
@ -161,7 +159,7 @@ public class EnumFieldSource extends FieldCacheSource {
final int ll = lower;
final int uu = upper;
return new ValueSourceScorer(weight, readerContext, this) {
return new ValueSourceScorer(readerContext, this) {
@Override
public boolean matches(int doc) throws IOException {
if (!exists(doc)) return false;

View File

@ -192,7 +192,7 @@ public final class IntervalQuery extends Query {
return null;
}
final var scorer =
new IntervalScorer(this, intervals, intervalsSource.minExtent(), boost, scoreFunction);
new IntervalScorer(intervals, intervalsSource.minExtent(), boost, scoreFunction);
return new DefaultScorerSupplier(scorer);
}

View File

@ -21,7 +21,6 @@ import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.similarities.Similarity;
class IntervalScorer extends Scorer {
@ -35,12 +34,7 @@ class IntervalScorer extends Scorer {
private int lastScoredDoc = -1;
IntervalScorer(
Weight weight,
IntervalIterator intervals,
int minExtent,
float boost,
IntervalScoreFunction scoreFunction) {
super(weight);
IntervalIterator intervals, int minExtent, float boost, IntervalScoreFunction scoreFunction) {
this.intervals = intervals;
this.minExtent = minExtent;
this.boost = boost;

View File

@ -171,7 +171,7 @@ public class PayloadScoreQuery extends SpanQuery {
Explanation payloadExpl = scorer.getPayloadExplanation();
if (includeSpanScore) {
SpanWeight innerWeight = ((PayloadSpanWeight) scorer.getWeight()).innerWeight;
SpanWeight innerWeight = this.innerWeight;
Explanation innerExpl = innerWeight.explain(context, doc);
return Explanation.match(
scorer.scoreCurrentDoc(), "PayloadSpanQuery, product of:", innerExpl, payloadExpl);
@ -188,7 +188,7 @@ public class PayloadScoreQuery extends SpanQuery {
}
LeafSimScorer docScorer = innerWeight.getSimScorer(context);
PayloadSpans payloadSpans = new PayloadSpans(spans, decoder);
final var scorer = new PayloadSpanScorer(this, payloadSpans, docScorer);
final var scorer = new PayloadSpanScorer(payloadSpans, docScorer);
return new DefaultScorerSupplier(scorer);
}
}
@ -244,9 +244,8 @@ public class PayloadScoreQuery extends SpanQuery {
private final PayloadSpans spans;
private PayloadSpanScorer(SpanWeight weight, PayloadSpans spans, LeafSimScorer docScorer)
throws IOException {
super(weight, spans, docScorer);
private PayloadSpanScorer(PayloadSpans spans, LeafSimScorer docScorer) throws IOException {
super(spans, docScorer);
this.spans = spans;
}

View File

@ -192,7 +192,7 @@ public class SpanPayloadCheckQuery extends SpanQuery {
return null;
}
final LeafSimScorer docScorer = getSimScorer(context);
final var scorer = new SpanScorer(this, spans, docScorer);
final var scorer = new SpanScorer(spans, docScorer);
return new DefaultScorerSupplier(scorer);
}

View File

@ -140,13 +140,12 @@ public final class SpanContainingQuery extends SpanContainQuery {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final SpanWeight spanWeight = this;
final Spans spans = getSpans(context, Postings.POSITIONS);
if (spans == null) {
return null;
}
final LeafSimScorer docScorer = getSimScorer(context);
final var scorer = new SpanScorer(spanWeight, spans, docScorer);
final var scorer = new SpanScorer(spans, docScorer);
return new DefaultScorerSupplier(scorer);
}
}

View File

@ -243,13 +243,12 @@ public class SpanNearQuery extends SpanQuery implements Cloneable {
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
final SpanWeight spanWeight = this;
final Spans spans = getSpans(context, Postings.POSITIONS);
if (spans == null) {
return null;
}
final LeafSimScorer docScorer = getSimScorer(context);
final var scorer = new SpanScorer(spanWeight, spans, docScorer);
final var scorer = new SpanScorer(spans, docScorer);
return new DefaultScorerSupplier(scorer);
}
}

Some files were not shown because too many files have changed in this diff Show More