mirror of https://github.com/apache/lucene.git

LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.

This commit is contained in:
parent e0213933dd
commit 625979113f
@@ -107,6 +107,9 @@ Other

* LUCENE-7360: Explanation.toHtml() is deprecated. (Alan Woodward)

+* LUCENE-7372: Factor out an org.apache.lucene.search.FilterWeight class.
+  (Christine Poerschke, Adrien Grand, David Smiley)

======================= Lucene 6.1.0 =======================

New Features
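The hunks below that touch existing Weight wrappers all apply the same mechanical simplification: a delegating Weight subclass that used to keep its own wrapped-weight field and forward extractTerms, explain, getValueForNormalization, normalize and scorer by hand now extends the new FilterWeight and keeps only the behaviour it actually customizes. A minimal before/after sketch of that pattern (MyWrappingWeight is a hypothetical class, not part of this commit; the usual org.apache.lucene.search and org.apache.lucene.index imports are assumed):

// Before: hand-written delegation to the wrapped Weight.
class MyWrappingWeight extends Weight {
  private final Weight in;
  MyWrappingWeight(Weight in) {
    super(in.getQuery());
    this.in = in;
  }
  @Override public void extractTerms(Set<Term> terms) { in.extractTerms(terms); }
  @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return in.explain(context, doc); }
  @Override public float getValueForNormalization() throws IOException { return in.getValueForNormalization(); }
  @Override public void normalize(float norm, float boost) { in.normalize(norm, boost); }
  @Override public Scorer scorer(LeafReaderContext context) throws IOException { return in.scorer(context); }
}

// After: FilterWeight supplies the delegation; only custom behaviour remains.
class MyWrappingWeight extends FilterWeight {
  MyWrappingWeight(Weight in) {
    super(in);
  }
  // override only the methods that need to differ from plain delegation
}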
@@ -0,0 +1,83 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.io.IOException;
import java.util.Set;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;

/**
 * A {@code FilterWeight} contains another {@code Weight} and implements
 * all abstract methods by calling the contained weight's method.
 *
 * Note that {@code FilterWeight} does not override the non-abstract
 * {@link Weight#bulkScorer(LeafReaderContext)} method and subclasses of
 * {@code FilterWeight} must provide their bulkScorer implementation
 * if required.
 *
 * @lucene.internal
 */
public abstract class FilterWeight extends Weight {

  final protected Weight in;

  /**
   * Default constructor.
   */
  protected FilterWeight(Weight weight) {
    this(weight.getQuery(), weight);
  }

  /**
   * Alternative constructor.
   * Use this variant only if the <code>weight</code> was not obtained
   * via the {@link Query#createWeight(IndexSearcher, boolean)}
   * method of the <code>query</code> object.
   */
  protected FilterWeight(Query query, Weight weight) {
    super(query);
    this.in = weight;
  }

  @Override
  public void extractTerms(Set<Term> terms) {
    in.extractTerms(terms);
  }

  @Override
  public Explanation explain(LeafReaderContext context, int doc) throws IOException {
    return in.explain(context, doc);
  }

  @Override
  public float getValueForNormalization() throws IOException {
    return in.getValueForNormalization();
  }

  @Override
  public void normalize(float norm, float boost) {
    in.normalize(norm, boost);
  }

  @Override
  public Scorer scorer(LeafReaderContext context) throws IOException {
    return in.scorer(context);
  }

}
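Since FilterWeight deliberately leaves Weight.bulkScorer alone, a subclass that does want plain delegation for bulk scoring has to add it explicitly, as BrokenExplainWeight does further down in this commit. A minimal sketch of that, assuming the usual org.apache.lucene.search and org.apache.lucene.index imports (DelegatingBulkWeight is a hypothetical name used only for illustration):

// Hypothetical subclass that restores bulkScorer delegation on top of FilterWeight.
class DelegatingBulkWeight extends FilterWeight {
  DelegatingBulkWeight(Weight in) {
    super(in);
  }

  @Override
  public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
    return in.bulkScorer(context); // forward bulk scoring to the wrapped weight
  }
}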
@@ -0,0 +1,67 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.search;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;

import org.apache.lucene.util.LuceneTestCase;
import org.junit.Test;

public class TestFilterWeight extends LuceneTestCase {

  @Test
  public void testDeclaredMethodsOverridden() throws Exception {
    final Class<?> subClass = FilterWeight.class;
    implTestDeclaredMethodsOverridden(subClass.getSuperclass(), subClass);
  }

  private void implTestDeclaredMethodsOverridden(Class<?> superClass, Class<?> subClass) throws Exception {
    for (final Method superClassMethod : superClass.getDeclaredMethods()) {
      final int modifiers = superClassMethod.getModifiers();
      if (Modifier.isFinal(modifiers)) continue;
      if (Modifier.isStatic(modifiers)) continue;
      if (superClassMethod.getName().equals("bulkScorer")) {
        try {
          final Method subClassMethod = subClass.getDeclaredMethod(
              superClassMethod.getName(),
              superClassMethod.getParameterTypes());
          fail(subClass + " must not override\n'" + superClassMethod + "'"
              + " but it does override\n'" + subClassMethod + "'");
        } catch (NoSuchMethodException e) {
          /* FilterWeight must not override the bulkScorer method
           * since as of July 2016 not all deriving classes use the
           * {code}return in.bulkScorer(context);{code}
           * implementation that FilterWeight.bulkScorer would use.
           */
          continue;
        }
      }
      try {
        final Method subClassMethod = subClass.getDeclaredMethod(
            superClassMethod.getName(),
            superClassMethod.getParameterTypes());
        assertEquals("getReturnType() difference",
            superClassMethod.getReturnType(),
            subClassMethod.getReturnType());
      } catch (NoSuchMethodException e) {
        fail(subClass + " needs to override '" + superClassMethod + "'");
      }
    }
  }

}
@@ -29,7 +29,6 @@ import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
@@ -1088,39 +1087,17 @@ public class TestLRUQueryCache extends LuceneTestCase {
    }
  }

-  private static class WeightWrapper extends Weight {
+  private static class WeightWrapper extends FilterWeight {

-    private final Weight in;
    private final AtomicBoolean scorerCalled;
    private final AtomicBoolean bulkScorerCalled;

    protected WeightWrapper(Weight in, AtomicBoolean scorerCalled, AtomicBoolean bulkScorerCalled) {
-      super(in.getQuery());
-      this.in = in;
+      super(in);
      this.scorerCalled = scorerCalled;
      this.bulkScorerCalled = bulkScorerCalled;
    }

-    @Override
-    public void extractTerms(Set<Term> terms) {
-      in.extractTerms(terms);
-    }
-
-    @Override
-    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      return in.explain(context, doc);
-    }
-
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return in.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      in.normalize(norm, boost);
-    }
-
    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
      scorerCalled.set(true);
@@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@@ -102,13 +103,10 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
        '}';
  }

-  final class W extends Weight {
-
-    private final Weight approximationWeight;
+  final class W extends FilterWeight {

    W(Query query, Weight approximationWeight) {
-      super(query);
-      this.approximationWeight = approximationWeight;
+      super(query, approximationWeight);
    }

    @Override
@@ -159,7 +157,7 @@ final class GlobalOrdinalsWithScoreQuery extends Query {
        return null;
      }

-      Scorer approximationScorer = approximationWeight.scorer(context);
+      Scorer approximationScorer = in.scorer(context);
      if (approximationScorer == null) {
        return null;
      } else if (globalOrds != null) {
@@ -20,11 +20,9 @@ import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@@ -90,39 +88,22 @@ public class ToChildBlockJoinQuery extends Query {
    return parentQuery;
  }

-  private static class ToChildBlockJoinWeight extends Weight {
-    private final Weight parentWeight;
+  private static class ToChildBlockJoinWeight extends FilterWeight {
    private final BitSetProducer parentsFilter;
    private final boolean doScores;

    public ToChildBlockJoinWeight(Query joinQuery, Weight parentWeight, BitSetProducer parentsFilter, boolean doScores) {
-      super(joinQuery);
-      this.parentWeight = parentWeight;
+      super(joinQuery, parentWeight);
      this.parentsFilter = parentsFilter;
      this.doScores = doScores;
    }

-    @Override
-    public void extractTerms(Set<Term> terms) {
-      parentWeight.extractTerms(terms);
-    }
-
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return parentWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      parentWeight.normalize(norm, boost);
-    }
-
    // NOTE: acceptDocs applies (and is checked) only in the
    // child document space
    @Override
    public Scorer scorer(LeafReaderContext readerContext) throws IOException {

-      final Scorer parentScorer = parentWeight.scorer(readerContext);
+      final Scorer parentScorer = in.scorer(readerContext);

      if (parentScorer == null) {
        // No matches
@@ -148,7 +129,7 @@ public class ToChildBlockJoinQuery extends Query {
        return Explanation.match(
          scorer.score(),
          String.format(Locale.ROOT, "Score based on parent document %d", parentDoc + context.docBase),
-          parentWeight.explain(context, parentDoc)
+          in.explain(context, parentDoc)
        );
      }
      return Explanation.noMatch("Not a match");
@@ -20,12 +20,10 @@ import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
@@ -124,39 +122,22 @@ public class ToParentBlockJoinQuery extends Query {
    return childQuery;
  }

-  private static class BlockJoinWeight extends Weight {
-    private final Weight childWeight;
+  private static class BlockJoinWeight extends FilterWeight {
    private final BitSetProducer parentsFilter;
    private final ScoreMode scoreMode;

    public BlockJoinWeight(Query joinQuery, Weight childWeight, BitSetProducer parentsFilter, ScoreMode scoreMode) {
-      super(joinQuery);
-      this.childWeight = childWeight;
+      super(joinQuery, childWeight);
      this.parentsFilter = parentsFilter;
      this.scoreMode = scoreMode;
    }

-    @Override
-    public void extractTerms(Set<Term> terms) {
-      childWeight.extractTerms(terms);
-    }
-
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return childWeight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      childWeight.normalize(norm, boost);
-    }
-
    // NOTE: acceptDocs applies (and is checked) only in the
    // parent document space
    @Override
    public Scorer scorer(LeafReaderContext readerContext) throws IOException {

-      final Scorer childScorer = childWeight.scorer(readerContext);
+      final Scorer childScorer = in.scorer(readerContext);
      if (childScorer == null) {
        // No matches
        return null;
@@ -184,7 +165,7 @@ public class ToParentBlockJoinQuery extends Query {
    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
      BlockJoinScorer scorer = (BlockJoinScorer) scorer(context);
      if (scorer != null && scorer.iterator().advance(doc) == doc) {
-        return scorer.explain(context, childWeight);
+        return scorer.explain(context, in);
      }
      return Explanation.noMatch("Not a match");
    }
@@ -18,44 +18,19 @@ package org.apache.lucene.search;

import java.io.IOException;
import java.util.Random;
import java.util.Set;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;

-class AssertingWeight extends Weight {
+class AssertingWeight extends FilterWeight {

  final Random random;
-  final Weight in;
  final boolean needsScores;

  AssertingWeight(Random random, Weight in, boolean needsScores) {
-    super(in.getQuery());
+    super(in);
    this.random = random;
-    this.in = in;
    this.needsScores = needsScores;
  }

-  @Override
-  public void extractTerms(Set<Term> terms) {
-    in.extractTerms(terms);
-  }
-
-  @Override
-  public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-    return in.explain(context, doc);
-  }
-
-  @Override
-  public float getValueForNormalization() throws IOException {
-    return in.getValueForNormalization();
-  }
-
-  @Override
-  public void normalize(float norm, float boost) {
-    in.normalize(norm, boost);
-  }
-
  @Override
  public Scorer scorer(LeafReaderContext context) throws IOException {
    final Scorer inScorer = in.scorer(context);
@@ -18,13 +18,10 @@ package org.apache.lucene.search;

import java.io.IOException;
import java.util.Random;
import java.util.Set;

import com.carrotsearch.randomizedtesting.generators.RandomInts;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;

/**
 * A {@link Query} that adds random approximations to its scorers.
@@ -70,40 +67,18 @@ public class RandomApproximationQuery extends Query {
    return new RandomApproximationWeight(weight, new Random(random.nextLong()));
  }

-  private static class RandomApproximationWeight extends Weight {
+  private static class RandomApproximationWeight extends FilterWeight {

-    private final Weight weight;
    private final Random random;

    RandomApproximationWeight(Weight weight, Random random) {
-      super(weight.getQuery());
-      this.weight = weight;
+      super(weight);
      this.random = random;
    }

-    @Override
-    public void extractTerms(Set<Term> terms) {
-      weight.extractTerms(terms);
-    }
-
-    @Override
-    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      return weight.explain(context, doc);
-    }
-
-    @Override
-    public float getValueForNormalization() throws IOException {
-      return weight.getValueForNormalization();
-    }
-
-    @Override
-    public void normalize(float norm, float boost) {
-      weight.normalize(norm, boost);
-    }
-
    @Override
    public Scorer scorer(LeafReaderContext context) throws IOException {
-      final Scorer scorer = weight.scorer(context);
+      final Scorer scorer = in.scorer(context);
      if (scorer == null) {
        return null;
      }
@@ -17,8 +17,6 @@
package org.apache.lucene.search;

import java.io.IOException;
import java.util.Set;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;

@@ -78,11 +76,9 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
    }
  }

-  public static final class BrokenExplainWeight extends Weight {
-    final Weight in;
+  public static final class BrokenExplainWeight extends FilterWeight {
    public BrokenExplainWeight(BrokenExplainTermQuery q, Weight in) {
-      super(q);
-      this.in = in;
+      super(q, in);
    }
    public BulkScorer bulkScorer(LeafReaderContext context) throws IOException {
      return in.bulkScorer(context);
@@ -104,17 +100,5 @@ public class TestBaseExplanationTestCase extends BaseExplanationTestCase {
      }
      return result;
    }
-    public void extractTerms(Set<Term> terms) {
-      in.extractTerms(terms);
-    }
-    public float getValueForNormalization() throws IOException {
-      return in.getValueForNormalization();
-    }
-    public void normalize(float norm, float boost) {
-      in.normalize(norm, boost);
-    }
-    public Scorer scorer(LeafReaderContext context) throws IOException {
-      return in.scorer(context);
-    }
  }
}
@@ -20,14 +20,12 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;
import java.util.Set;

import com.carrotsearch.hppc.IntFloatHashMap;
import com.carrotsearch.hppc.IntIntHashMap;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
+import org.apache.lucene.search.FilterWeight;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
@@ -35,7 +33,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryRescorer;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
@@ -179,40 +176,20 @@ public class ReRankQParserPlugin extends QParserPlugin {
    }
  }

-  private class ReRankWeight extends Weight{
+  private class ReRankWeight extends FilterWeight {
    private Query reRankQuery;
    private IndexSearcher searcher;
-    private Weight mainWeight;
    private double reRankWeight;

    public ReRankWeight(Query mainQuery, Query reRankQuery, double reRankWeight, IndexSearcher searcher, boolean needsScores) throws IOException {
-      super(mainQuery);
+      super(mainQuery, mainQuery.createWeight(searcher, needsScores));
      this.reRankQuery = reRankQuery;
      this.searcher = searcher;
      this.reRankWeight = reRankWeight;
-      this.mainWeight = mainQuery.createWeight(searcher, needsScores);
    }

-    @Override
-    public void extractTerms(Set<Term> terms) {
-      this.mainWeight.extractTerms(terms);
-    }
-
-    public float getValueForNormalization() throws IOException {
-      return mainWeight.getValueForNormalization();
-    }
-
-    public Scorer scorer(LeafReaderContext context) throws IOException {
-      return mainWeight.scorer(context);
-    }
-
-    public void normalize(float norm, float topLevelBoost) {
-      mainWeight.normalize(norm, topLevelBoost);
-    }
-
    public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-      Explanation mainExplain = mainWeight.explain(context, doc);
+      Explanation mainExplain = in.explain(context, doc);
      return new QueryRescorer(reRankQuery) {
        @Override
        protected float combine(float firstPassScore, boolean secondPassMatches, float secondPassScore) {
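Several of the wrappers above (W, ToChildBlockJoinWeight, BlockJoinWeight, ReRankWeight) report one query but delegate to a weight built from another, so they use the two-argument FilterWeight constructor rather than the single-argument one. A hedged sketch of that pattern, with hypothetical names (OuterWeight, outerQuery, innerQuery):

// Hypothetical wrapper whose reported query differs from the query that produced the wrapped weight.
class OuterWeight extends FilterWeight {
  OuterWeight(Query outerQuery, Query innerQuery, IndexSearcher searcher, boolean needsScores) throws IOException {
    // Two-argument constructor: the query this weight reports and the weight it delegates to
    // come from different queries, so FilterWeight(Weight) alone would not be correct here.
    super(outerQuery, innerQuery.createWeight(searcher, needsScores));
  }
}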